[ 462.107315] env[62525]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=62525) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 462.107638] env[62525]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=62525) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 462.107749] env[62525]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=62525) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 462.108038] env[62525]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 462.213590] env[62525]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62525) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}} [ 462.224956] env[62525]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62525) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}} [ 462.822616] env[62525]: INFO nova.virt.driver [None req-02a36719-bfcc-4214-b494-de63ca9227e7 None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 462.894378] env[62525]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 462.894610] env[62525]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 462.894699] env[62525]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62525) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 466.176297] env[62525]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-1155ae8b-0326-4dc4-b677-b1fd8814d3a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.192836] env[62525]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62525) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 466.193045] env[62525]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-076d9a04-10fa-473f-a8f6-cce442b1a0b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.227901] env[62525]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 19084.
[ 466.228083] env[62525]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.334s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 466.228611] env[62525]: INFO nova.virt.vmwareapi.driver [None req-02a36719-bfcc-4214-b494-de63ca9227e7 None None] VMware vCenter version: 7.0.3 [ 466.232294] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a55fb5-6c22-43a7-8795-627e41cfc92f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.250634] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882caf47-0bf1-49a4-981f-867cb3bba16b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.256750] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04d1146-2fb1-42ec-bc12-8a8d2f651846 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.263690] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d73e7b-e3c2-467e-9e25-55eaa695fc80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.276745] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04143003-9f92-4a1d-aba3-3fc22cef8570 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.282753] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba065574-bcaa-419c-a5ef-fad17f92e06a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.312812] env[62525]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-025fb302-171a-4479-bb78-7d5d1768a1e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.317797] env[62525]: DEBUG nova.virt.vmwareapi.driver [None req-02a36719-bfcc-4214-b494-de63ca9227e7 None None] Extension org.openstack.compute already exists. {{(pid=62525) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}} [ 466.320420] env[62525]: INFO nova.compute.provider_config [None req-02a36719-bfcc-4214-b494-de63ca9227e7 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
[ 466.824354] env[62525]: DEBUG nova.context [None req-02a36719-bfcc-4214-b494-de63ca9227e7 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),7294a5ce-f86a-4a0a-be3c-0dc93cebf043(cell1) {{(pid=62525) load_cells /opt/stack/nova/nova/context.py:464}} [ 466.826590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 466.826819] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 466.827609] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 466.828057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Acquiring lock "7294a5ce-f86a-4a0a-be3c-0dc93cebf043" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 466.828262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Lock "7294a5ce-f86a-4a0a-be3c-0dc93cebf043" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 466.829297] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Lock "7294a5ce-f86a-4a0a-be3c-0dc93cebf043" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 466.851863] env[62525]: INFO dbcounter [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Registered counter for database nova_cell0 [ 466.859993] env[62525]: INFO dbcounter [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Registered counter for database nova_cell1 [ 466.863163] env[62525]: DEBUG oslo_db.sqlalchemy.engines [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62525) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 466.863524] env[62525]: DEBUG oslo_db.sqlalchemy.engines [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62525) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 466.868772] env[62525]: ERROR nova.db.main.api [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 466.868772] env[62525]: result = function(*args, **kwargs) [ 466.868772] env[62525]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 466.868772] env[62525]: return func(*args, **kwargs) [ 466.868772] env[62525]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 466.868772] env[62525]: result = fn(*args, **kwargs) [ 466.868772] env[62525]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 466.868772] env[62525]: return f(*args, **kwargs) [ 466.868772] env[62525]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version [ 466.868772] env[62525]: return db.service_get_minimum_version(context, binaries) [ 466.868772] env[62525]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 466.868772] env[62525]: _check_db_access() [ 466.868772] env[62525]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 466.868772] env[62525]: stacktrace = ''.join(traceback.format_stack()) [ 466.868772] env[62525]: [ 466.869794] env[62525]: ERROR nova.db.main.api [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 466.869794] env[62525]: result = function(*args, **kwargs) [ 466.869794] env[62525]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 466.869794] env[62525]: return func(*args, **kwargs) [ 466.869794] env[62525]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 466.869794] env[62525]: result = fn(*args, **kwargs) [ 466.869794] env[62525]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 466.869794] env[62525]: return f(*args, **kwargs) [ 466.869794] env[62525]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version [ 466.869794] env[62525]: return db.service_get_minimum_version(context, binaries) [ 466.869794] env[62525]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 466.869794] env[62525]: _check_db_access() [ 466.869794] env[62525]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 466.869794] env[62525]: stacktrace = ''.join(traceback.format_stack()) [ 466.869794] env[62525]: [ 466.870181] env[62525]: WARNING nova.objects.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 466.870329] env[62525]: WARNING nova.objects.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Failed to get minimum service version for cell 7294a5ce-f86a-4a0a-be3c-0dc93cebf043 [ 466.870751] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Acquiring lock "singleton_lock" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 466.870911] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Acquired lock "singleton_lock" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
466.871169] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Releasing lock "singleton_lock" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 466.871487] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Full set of CONF: {{(pid=62525) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 466.871628] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ******************************************************************************** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 466.871756] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Configuration options gathered from: {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 466.871890] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 466.872090] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 466.872223] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ================================================================================ {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 466.872436] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] allow_resize_to_same_host = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.872598] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] arq_binding_timeout = 300 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.872731] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] backdoor_port = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.872858] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] backdoor_socket = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.873064] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] block_device_allocate_retries = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.873224] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] block_device_allocate_retries_interval = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.873391] env[62525]: DEBUG 
oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cert = self.pem {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.873556] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.873724] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute_monitors = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.873894] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] config_dir = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.874085] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] config_drive_format = iso9660 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.874248] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.874425] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] config_source = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.874594] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] console_host = devstack {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.874760] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] control_exchange = nova {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.874919] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cpu_allocation_ratio = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.875094] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] daemon = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.875269] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] debug = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.875428] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] default_access_ip_network_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.875590] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] default_availability_zone = nova {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.875745] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] default_ephemeral_format = 
None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.875901] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] default_green_pool_size = 1000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.876157] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.876324] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] default_schedule_zone = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.876481] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] disk_allocation_ratio = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.876640] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] enable_new_services = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.876814] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] enabled_apis = ['osapi_compute'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.876978] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] enabled_ssl_apis = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.877145] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] flat_injected = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.877306] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] force_config_drive = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.877463] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] force_raw_images = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.877627] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] graceful_shutdown_timeout = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.877785] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] heal_instance_info_cache_interval = 60 {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.878011] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] host = cpu-1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.878189] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.878355] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.878517] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.878737] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.878902] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instance_build_timeout = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.879073] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instance_delete_interval = 300 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.879255] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instance_format = [instance: %(uuid)s] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.879421] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instance_name_template = instance-%08x {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.879582] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instance_usage_audit = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.879750] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instance_usage_audit_period = month {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.879912] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.880088] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.880257] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] internal_service_availability_zone = internal {{(pid=62525) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.880410] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] key = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.880568] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] live_migration_retry_count = 30 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.880729] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_color = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.880891] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_config_append = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.881070] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.881233] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_dir = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.881388] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.881517] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_options = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.881674] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_rotate_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.881836] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_rotate_interval_type = days {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.881999] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] log_rotation_type = none {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883063] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883063] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883063] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883063] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883063] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883319] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] long_rpc_timeout = 1800 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883319] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] max_concurrent_builds = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883319] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] max_concurrent_live_migrations = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883319] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] max_concurrent_snapshots = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883483] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] max_local_block_devices = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883632] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] max_logfile_count = 30 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883787] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] max_logfile_size_mb = 200 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.883943] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] maximum_instance_delete_attempts = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.884176] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] metadata_listen = 0.0.0.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.884365] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] metadata_listen_port = 8775 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.884534] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] metadata_workers = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.884695] env[62525]: DEBUG oslo_service.service 
[None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] migrate_max_retries = -1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.884860] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] mkisofs_cmd = genisoimage {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.885077] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.885215] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] my_ip = 10.180.1.21 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.885376] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] network_allocate_retries = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.885554] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.885719] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.885882] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] osapi_compute_listen_port = 8774 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.886056] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] osapi_compute_unique_server_name_scope = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.886229] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] osapi_compute_workers = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.886391] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] password_length = 12 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.886553] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] periodic_enable = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.886710] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] periodic_fuzzy_delay = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.886877] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] pointer_model = usbtablet {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.887052] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] preallocate_images = none {{(pid=62525) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.887219] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] publish_errors = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.887347] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] pybasedir = /opt/stack/nova {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.887504] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ram_allocation_ratio = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.887662] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] rate_limit_burst = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.887827] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] rate_limit_except_level = CRITICAL {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.887981] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] rate_limit_interval = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.888153] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] reboot_timeout = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.888312] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] reclaim_instance_interval = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.888467] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] record = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.888629] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] reimage_timeout_per_gb = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.888791] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] report_interval = 120 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.888946] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] rescue_timeout = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.889119] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] reserved_host_cpus = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.889279] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] reserved_host_disk_mb = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.889436] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 
None None] reserved_host_memory_mb = 512 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.889590] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] reserved_huge_pages = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.889748] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] resize_confirm_window = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.889902] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] resize_fs_using_block_device = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.890068] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] resume_guests_state_on_host_boot = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.890239] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.890400] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] rpc_response_timeout = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.890558] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] run_external_periodic_tasks = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.890722] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] running_deleted_instance_action = reap {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.890879] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.891048] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] running_deleted_instance_timeout = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.891209] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler_instance_sync_interval = 120 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.891375] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_down_time = 720 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.891542] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] servicegroup_driver = db {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.891696] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] shell_completion = None {{(pid=62525) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.891855] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] shelved_offload_time = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.892029] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] shelved_poll_interval = 3600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.892205] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] shutdown_timeout = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.892368] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] source_is_ipv6 = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.892528] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ssl_only = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.892771] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.892937] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] sync_power_state_interval = 600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.893132] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] sync_power_state_pool_size = 1000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.893315] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] syslog_log_facility = LOG_USER {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.893474] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] tempdir = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.893632] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] timeout_nbd = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.893798] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] transport_url = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.893960] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] update_resources_interval = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.894177] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] use_cow_images = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.894353] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] use_eventlog = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.894515] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] use_journal = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.894670] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] use_json = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.894829] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] use_rootwrap_daemon = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.894989] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] use_stderr = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.895164] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] use_syslog = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.895319] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vcpu_pin_set = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.895486] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plugging_is_fatal = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.895651] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plugging_timeout = 300 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.895813] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] virt_mkfs = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.895971] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] volume_usage_poll_interval = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.896146] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] watch_log_file = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.896315] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] web = /usr/share/spice-html5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 466.896494] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.896658] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.896816] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.896982] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_concurrency.disable_process_locking = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.897540] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.897733] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.897907] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.898095] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.898274] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.898442] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.898626] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.auth_strategy = keystone {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.898792] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.compute_link_prefix = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.898968] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.899159] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.dhcp_domain = novalocal {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.899333] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.enable_instance_password = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.899496] 
env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.glance_link_prefix = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.899660] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.899830] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.899993] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.instance_list_per_project_cells = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.900172] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.list_records_by_skipping_down_cells = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.900340] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.local_metadata_per_cell = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.900508] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.max_limit = 1000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.900673] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.metadata_cache_expiration = 15 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.900845] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.neutron_default_tenant_id = default {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.901034] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.response_validation = warn {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.901214] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.use_neutron_default_nets = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.901387] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.901548] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.901713] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.901885] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.902066] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.vendordata_dynamic_targets = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.902234] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.vendordata_jsonfile_path = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.902413] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.902602] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.backend = dogpile.cache.memcached {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.902767] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.backend_argument = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.902927] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.backend_expiration_time = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.903140] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.config_prefix = cache.oslo {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.903325] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.dead_timeout = 60.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.903494] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.debug_cache_backend = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.903658] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.enable_retry_client = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.903821] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.enable_socket_keepalive = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.904047] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.enabled = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.904198] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.enforce_fips_mode = False {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.904375] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.expiration_time = 600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.904542] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.hashclient_retry_attempts = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.904708] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.904871] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_dead_retry = 300 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.905044] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_password = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.905216] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.905383] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.905544] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_pool_maxsize = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.905705] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.905867] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_sasl_enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.906057] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.906230] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.906388] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.memcache_username = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.906552] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.proxies = [] {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.906711] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.redis_db = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.906867] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.redis_password = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.907047] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.907228] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.907395] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.redis_server = localhost:6379 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.907560] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.redis_socket_timeout = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.907716] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.redis_username = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.907876] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.retry_attempts = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.908054] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.retry_delay = 0.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.908223] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.socket_keepalive_count = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.908387] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.socket_keepalive_idle = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.908545] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.socket_keepalive_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.908701] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.tls_allowed_ciphers = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.908861] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.tls_cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.909029] 
env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.tls_certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.909197] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.tls_enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.909356] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cache.tls_keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.909525] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.909697] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.auth_type = password {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.909855] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.910042] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.910206] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.910370] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.910529] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.cross_az_attach = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.910690] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.debug = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.910850] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.endpoint_template = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.911044] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.http_retries = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.911195] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.911352] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.keyfile = None {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.911525] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.os_region_name = RegionOne {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.911690] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.911850] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cinder.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.912029] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.912196] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.cpu_dedicated_set = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.912354] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.cpu_shared_set = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.912517] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.image_type_exclude_list = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.912678] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.912838] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.913017] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.913213] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.913390] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.913555] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.resource_provider_association_refresh = 300 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.913718] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.913880] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.shutdown_retry_interval = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.914073] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.914286] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] conductor.workers = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.914471] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] console.allowed_origins = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.914632] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] console.ssl_ciphers = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.914800] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] console.ssl_minimum_version = default {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.914968] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] consoleauth.enforce_session_timeout = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.915153] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] consoleauth.token_ttl = 600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.915332] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.915485] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.915646] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.915803] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.connect_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.915962] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.connect_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.916137] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.endpoint_override = None 
{{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.916297] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.916450] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.916603] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.max_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.916756] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.min_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.916912] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.region_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.917086] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.retriable_status_codes = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.917248] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.917413] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.service_type = accelerator {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.917574] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.917729] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.status_code_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.917883] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.status_code_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.918048] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.918232] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.918390] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] cyborg.version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
466.918568] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.backend = sqlalchemy {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.918737] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.connection = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.918902] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.connection_debug = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.919084] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.connection_parameters = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.919257] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.connection_recycle_time = 3600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.919421] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.connection_trace = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.919583] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.db_inc_retry_interval = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.919746] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.db_max_retries = 20 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.919910] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.db_max_retry_interval = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.920085] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.db_retry_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.920255] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.max_overflow = 50 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.920417] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.max_pool_size = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.920578] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.max_retries = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.920747] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.920914] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.mysql_wsrep_sync_wait = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.921119] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.pool_timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.921303] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.retry_interval = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.921465] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.slave_connection = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.921628] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.sqlite_synchronous = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.921791] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] database.use_db_reconnect = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.921966] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.backend = sqlalchemy {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.922150] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.connection = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.922318] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.connection_debug = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.922485] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.connection_parameters = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.922647] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.connection_recycle_time = 3600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.922810] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.connection_trace = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.922973] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.db_inc_retry_interval = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.923172] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.db_max_retries = 20 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.923342] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.db_max_retry_interval = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.923502] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.db_retry_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.923661] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.max_overflow = 50 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.923821] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.max_pool_size = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.923981] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.max_retries = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.924170] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.924332] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.924490] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.pool_timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.924650] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.retry_interval = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.924807] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.slave_connection = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.924968] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] api_database.sqlite_synchronous = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.925156] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] devices.enabled_mdev_types = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.925338] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.925504] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.925664] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ephemeral_storage_encryption.enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.925824] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.925995] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.api_servers = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.926172] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.926334] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.926495] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.926651] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.connect_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.926810] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.connect_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.926969] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.debug = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.927149] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.default_trusted_certificate_ids = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.927313] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.enable_certificate_validation = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.927473] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.enable_rbd_download = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.927630] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.endpoint_override = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.927793] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.927952] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.keyfile = None 
{{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.928122] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.max_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.928281] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.min_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.928441] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.num_retries = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.928605] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.rbd_ceph_conf = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.928763] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.rbd_connect_timeout = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.928927] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.rbd_pool = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.929104] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.rbd_user = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.929265] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.region_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.929421] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.retriable_status_codes = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.929575] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.929737] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.service_type = image {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.929902] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.930070] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.status_code_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.930231] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.status_code_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.930386] env[62525]: DEBUG 
oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.930563] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.930724] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.verify_glance_signatures = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.930881] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] glance.version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.931082] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] guestfs.debug = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.931257] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] mks.enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.931601] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.931789] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] image_cache.manager_interval = 2400 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.931959] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] image_cache.precache_concurrency = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.932143] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] image_cache.remove_unused_base_images = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.932317] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.932485] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.932662] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] image_cache.subdirectory_name = _base {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.932837] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.api_max_retries = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.933008] env[62525]: DEBUG 
oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.api_retry_interval = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.933206] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.933375] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.auth_type = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.933536] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.933694] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.933859] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.934034] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.conductor_group = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.934203] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.connect_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.934364] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.connect_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.934521] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.endpoint_override = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.934683] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.934840] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.934999] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.max_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.935173] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.min_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.935353] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.peer_list = [] {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.935523] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.region_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.935683] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.retriable_status_codes = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.935845] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.serial_console_state_timeout = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.936008] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.936186] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.service_type = baremetal {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.936347] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.shard = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.936514] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.936671] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.status_code_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.936828] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.status_code_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.936985] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.937179] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.937341] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ironic.version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.937520] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.937689] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] key_manager.fixed_key = **** {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.937863] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.938032] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.barbican_api_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.938195] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.barbican_endpoint = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.938366] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.barbican_endpoint_type = public {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.938521] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.barbican_region_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.938675] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.938829] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.938987] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.939163] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.939324] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.939484] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.number_of_retries = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.939643] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.retry_delay = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.939803] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.send_service_user_token = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.939962] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.split_loggers = False {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.940131] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.940295] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.verify_ssl = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.940453] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican.verify_ssl_path = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.940631] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.940787] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.auth_type = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.940943] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.941109] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.941274] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.941432] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.941587] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.941746] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.941900] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] barbican_service_user.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.942075] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.approle_role_id = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.942236] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.approle_secret_id = **** {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.942402] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.kv_mountpoint = secret {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.942557] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.kv_path = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.942717] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.kv_version = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.942873] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.namespace = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.943067] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.root_token_id = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.943223] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.ssl_ca_crt_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.943386] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.timeout = 60.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.943547] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.use_ssl = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.943712] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.943876] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.944062] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.944262] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.944427] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.connect_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.944586] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.connect_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.944745] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.endpoint_override = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.944910] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.945082] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.945248] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.max_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.945427] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.min_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.945593] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.region_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.945749] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.retriable_status_codes = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.945904] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.946083] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.service_type = identity {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.946286] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.946508] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.status_code_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.946683] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.status_code_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.946843] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.947053] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.947266] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] keystone.version = None 
{{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.947585] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.connection_uri = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.947873] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.cpu_mode = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.948205] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.948505] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.cpu_models = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.948823] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.cpu_power_governor_high = performance {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.949150] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.949455] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.cpu_power_management = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.949773] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.950100] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.device_detach_attempts = 8 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.950417] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.device_detach_timeout = 20 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.950726] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.disk_cachemodes = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.951042] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.disk_prefix = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.951360] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.enabled_perf_events = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.951678] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.file_backed_memory = 0 {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.951998] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.gid_maps = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.952323] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.hw_disk_discard = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.952634] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.hw_machine_type = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.952905] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.images_rbd_ceph_conf = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.953136] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.953318] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.953498] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.images_rbd_glance_store_name = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.953673] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.images_rbd_pool = rbd {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.953868] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.images_type = default {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.954068] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.images_volume_group = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.954246] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.inject_key = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.954410] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.inject_partition = -2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.954572] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.inject_password = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.954735] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.iscsi_iface = None {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.954897] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.iser_use_multipath = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.955075] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.955246] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.955408] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_downtime = 500 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.955570] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.955732] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.955891] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_inbound_addr = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.956074] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.956267] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.956435] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_scheme = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.956611] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_timeout_action = abort {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.956777] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_tunnelled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.956939] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.live_migration_uri = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.957118] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.957284] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.max_queues = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.957448] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.957674] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.957837] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.nfs_mount_options = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.958146] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.958321] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.958488] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.958648] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.958812] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.958976] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.num_pcie_ports = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.959159] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.959330] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.pmem_namespaces = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.959490] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.quobyte_client_cfg = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.959790] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.959966] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.960154] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.960323] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.960482] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rbd_secret_uuid = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.960640] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rbd_user = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.960801] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.960968] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.961163] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rescue_image_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.961336] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rescue_kernel_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.961497] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rescue_ramdisk_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.961668] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.961830] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.rx_queue_size = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.961996] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.smbfs_mount_options = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.962305] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.962484] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.snapshot_compression = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.962649] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.snapshot_image_format = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.962877] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.963072] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.sparse_logical_volumes = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.963254] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.swtpm_enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.963425] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.swtpm_group = tss {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.963594] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.swtpm_user = tss {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.963765] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.sysinfo_serial = unique {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.963925] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.tb_cache_size = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.964117] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.tx_queue_size = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.964294] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.uid_maps = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.964460] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.use_virtio_for_bridges = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.964629] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.virt_type = kvm {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.964797] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.volume_clear = zero 
{{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.964957] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.volume_clear_size = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.965137] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.volume_use_multipath = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.965298] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.vzstorage_cache_path = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.965465] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.965633] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.965799] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.965965] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.966273] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.966461] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.vzstorage_mount_user = stack {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.966632] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.966810] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.966985] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.auth_type = password {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.967162] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.967323] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.certfile = None 
{{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.967485] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.967642] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.connect_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.967798] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.connect_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.967963] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.default_floating_pool = public {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.968154] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.endpoint_override = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.968333] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.extension_sync_interval = 600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.968497] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.http_retries = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.968657] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.968811] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.968970] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.max_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.969153] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.969318] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.min_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.969485] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.ovs_bridge = br-int {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.969651] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.physnets = [] {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.969819] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.region_name = RegionOne {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.969979] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.retriable_status_codes = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.970161] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.service_metadata_proxy = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.970323] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.970490] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.service_type = network {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.970651] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.970806] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.status_code_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.970962] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.status_code_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.971133] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.971324] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.971489] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] neutron.version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.971661] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] notifications.bdms_in_notifications = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.971837] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] notifications.default_level = INFO {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.972024] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] notifications.notification_format = unversioned {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.972196] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] notifications.notify_on_state_change = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.972370] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.972544] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] pci.alias = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.972712] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] pci.device_spec = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.972875] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] pci.report_in_placement = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.973093] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.973280] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.auth_type = password {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.973450] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.973610] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.973767] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.973926] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.974116] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.connect_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.974294] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.connect_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.974455] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.default_domain_id = None {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.974612] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.default_domain_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.974765] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.domain_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.974919] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.domain_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.975085] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.endpoint_override = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.975252] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.975404] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.975558] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.max_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.975708] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.min_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.975870] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.password = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.976038] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.project_domain_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.976208] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.project_domain_name = Default {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.976371] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.project_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.976539] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.project_name = service {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.976703] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.region_name = RegionOne {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.976866] 
env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.retriable_status_codes = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.977034] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.977205] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.service_type = placement {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.977367] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.977522] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.status_code_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.977682] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.status_code_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.977840] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.system_scope = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.977993] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.978168] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.trust_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.978326] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.user_domain_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.978492] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.user_domain_name = Default {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.978649] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.user_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.978816] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.username = nova {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.978995] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.979170] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] placement.version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.979348] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.cores = 20 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.979508] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.count_usage_from_placement = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.979674] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.979837] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.injected_file_content_bytes = 10240 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.979998] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.injected_file_path_length = 255 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.980182] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.injected_files = 5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.980346] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.instances = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.980511] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.key_pairs = 100 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.980674] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.metadata_items = 128 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.980838] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.ram = 51200 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.981014] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.recheck_quota = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.981193] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.server_group_members = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.981360] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] quota.server_groups = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.981536] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.981701] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.981862] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.image_metadata_prefilter = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.982036] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.982210] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.max_attempts = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.982373] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.max_placement_results = 1000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.982539] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.982696] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.982858] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.983051] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] scheduler.workers = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.983237] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.983413] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.983593] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.983763] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.983933] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.984136] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.984338] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.984550] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.984726] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.host_subset_size = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.984894] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.985069] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.985242] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.985408] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.isolated_hosts = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.985570] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.isolated_images = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.985729] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.985886] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.986059] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.986226] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.pci_in_placement = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.986389] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.986549] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.986708] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.986866] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.987048] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.987219] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.987380] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.track_instance_changes = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.987556] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.987727] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] metrics.required = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.987892] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] metrics.weight_multiplier = 1.0 
{{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.988076] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.988247] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] metrics.weight_setting = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.988560] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.988736] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] serial_console.enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.988912] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] serial_console.port_range = 10000:20000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.989096] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.989273] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.989440] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] serial_console.serialproxy_port = 6083 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.989608] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.989780] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.auth_type = password {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.989942] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.990107] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.990273] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.990433] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.insecure = False {{(pid=62525) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.990589] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.990757] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.send_service_user_token = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.990921] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.991091] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] service_user.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.991278] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.agent_enabled = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.991443] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.991752] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.991953] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.992138] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.html5proxy_port = 6082 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.992301] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.image_compression = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.992460] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.jpeg_compression = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.992648] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.playback_compression = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.992842] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.require_secure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.993079] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.server_listen = 127.0.0.1 {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.993269] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.993466] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.streaming_mode = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.993647] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] spice.zlib_compression = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.993856] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] upgrade_levels.baseapi = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.994082] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] upgrade_levels.compute = auto {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.994265] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] upgrade_levels.conductor = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.994450] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] upgrade_levels.scheduler = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.994645] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.994843] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.995041] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.995211] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.995383] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.995537] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.insecure = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.995725] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.995915] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.996119] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vendordata_dynamic_auth.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.996323] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.api_retry_count = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.996489] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.ca_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.996662] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.996828] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.cluster_name = testcl1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.996989] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.connection_pool_size = 10 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.997165] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.console_delay_seconds = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.997335] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.datastore_regex = ^datastore.* {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.997547] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.997752] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.host_password = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.997958] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.host_port = 443 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.998192] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.host_username = administrator@vsphere.local {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.998392] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.insecure = True {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.998556] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.integration_bridge = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.998718] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.maximum_objects = 100 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.998878] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.pbm_default_policy = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.999052] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.pbm_enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.999419] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.pbm_wsdl_location = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.999419] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.999530] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.serial_port_proxy_uri = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.999685] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.serial_port_service_uri = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 466.999852] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.task_poll_interval = 0.5 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.000033] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.use_linked_clone = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.000208] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.vnc_keymap = en-us {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.000374] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.vnc_port = 5900 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.000536] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vmware.vnc_port_total = 10000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.000731] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.auth_schemes = ['none'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.000908] 
env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.enabled = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.001232] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.001419] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.001590] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.novncproxy_port = 6080 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.001784] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.server_listen = 127.0.0.1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.001964] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.002140] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.vencrypt_ca_certs = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.002300] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.vencrypt_client_cert = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.002456] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vnc.vencrypt_client_key = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.002633] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.002795] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.disable_deep_image_inspection = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.002955] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.003163] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.003327] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.003489] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.disable_rootwrap = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.003689] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.enable_numa_live_migration = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.003897] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.004131] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.004317] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.004480] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.libvirt_disable_apic = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.004642] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.004802] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.004962] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.005138] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.005299] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.005456] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.005611] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.005766] 
env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.005922] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.006106] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.006293] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.006459] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.client_socket_timeout = 900 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.006628] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.default_pool_size = 1000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.006847] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.keep_alive = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.007037] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.max_header_line = 16384 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.007207] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.007368] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.ssl_ca_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.007526] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.ssl_cert_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.007684] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.ssl_key_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.007844] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.tcp_keepidle = 600 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.008032] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.008204] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] zvm.ca_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.008365] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] zvm.cloud_connector_url = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.008659] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.008833] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] zvm.reachable_timeout = 300 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.009021] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.enforce_new_defaults = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.009401] env[62525]: WARNING oslo_config.cfg [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
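The long run of "log_opt_values" DEBUG lines in this section is oslo.config dumping every registered option at nova-compute start-up (the cfg.py:2824 frame cited in each entry), and the WARNING above is oslo.config flagging that the deprecated [oslo_policy] enforce_scope option has been explicitly set. A minimal sketch of that dump mechanism with stock oslo.config, reusing a few option names and values from this log; this is illustrative only, not Nova's actual start-up code:

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    # Register a few of the [oslo_policy] options seen in this dump.
    # enforce_scope is marked deprecated-for-removal, which is what makes
    # oslo.config emit a "Deprecated: Option ..." WARNING when an operator
    # sets it explicitly in a config file.
    opts = [
        cfg.BoolOpt('enforce_new_defaults', default=True),
        cfg.BoolOpt('enforce_scope', default=True,
                    deprecated_for_removal=True,
                    deprecated_reason='Scope checks will always be enforced.'),
        cfg.StrOpt('policy_file', default='policy.yaml'),
    ]

    CONF = cfg.ConfigOpts()
    CONF.register_opts(opts, group='oslo_policy')

    # Parse (defaults only here) and dump every registered option; this is
    # the call that produces DEBUG lines of the same shape as this log.
    CONF(args=[], project='nova')
    CONF.log_opt_values(LOG, logging.DEBUG)

Running the sketch prints one "group.option = value" DEBUG line per registered option, matching the format of the surrounding entries; the deprecation WARNING itself only appears when the deprecated option is actually set in a loaded config file rather than left at its default.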
[ 467.009587] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.enforce_scope = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.009760] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.policy_default_rule = default {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.009935] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.010118] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.policy_file = policy.yaml {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.010296] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.010456] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.010615] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.010771] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.010932] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.011115] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_policy.remote_timeout = 60.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.011290] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.011464] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.011638] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.connection_string = messaging:// {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.011803] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.enabled = False {{(pid=62525) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.011969] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.es_doc_type = notification {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.012146] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.es_scroll_size = 10000 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.012315] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.es_scroll_time = 2m {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.012477] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.filter_error_trace = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.012644] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.hmac_keys = **** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.012808] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.sentinel_service_name = mymaster {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.012969] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.socket_timeout = 0.1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.013181] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.trace_requests = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.013344] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler.trace_sqlalchemy = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.013525] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler_jaeger.process_tags = {} {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.013685] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler_jaeger.service_name_prefix = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.013846] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] profiler_otlp.service_name_prefix = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.014026] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] remote_debug.host = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.014244] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] remote_debug.port = None {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.014454] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.014620] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.014782] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.014943] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.015119] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.015284] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.015443] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.015603] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.015760] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.015926] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.016099] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.016272] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.016438] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.016603] 
env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.016771] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.016936] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.017117] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.017335] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.017526] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.017693] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.017858] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.018034] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.018206] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.018372] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.018533] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.018693] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.018852] env[62525]: DEBUG oslo_service.service [None 
req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.019020] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.019196] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.019360] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.ssl = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.019531] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.019700] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.019862] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.020042] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.020221] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.020383] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.020568] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.020735] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_notifications.retry = -1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.020917] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.021107] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.021300] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.auth_section = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.021475] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.auth_type = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.021633] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.cafile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.021788] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.certfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.021953] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.collect_timing = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.022126] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.connect_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.022288] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.connect_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.022443] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.endpoint_id = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.022609] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.022766] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.endpoint_override = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.022920] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.endpoint_region_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.023117] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.endpoint_service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.023283] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.endpoint_service_type = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.023451] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.insecure = False {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.023608] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.keyfile = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.023765] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.max_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.023922] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.min_version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.024102] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.region_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.024282] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.retriable_status_codes = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.024442] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.service_name = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.024597] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.service_type = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.024759] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.split_loggers = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.024913] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.status_code_retries = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.025125] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.status_code_retry_delay = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.025252] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.timeout = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.025406] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.valid_interfaces = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.025560] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_limit.version = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.025721] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_reports.file_event_handler = None {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.025881] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.026048] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] oslo_reports.log_dir = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.026225] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.026382] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.026539] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.026699] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.026861] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.027027] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.027202] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.027358] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_ovs_privileged.group = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.027512] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.027673] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.027833] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.027987] env[62525]: DEBUG 
oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] vif_plug_ovs_privileged.user = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.028169] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.028347] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.028520] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.028688] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.028856] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.029029] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.029198] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.029353] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.029524] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.029690] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_ovs.isolate_vif = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.029853] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.030023] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.030194] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.030357] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.030519] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] os_vif_ovs.per_port_bridge = False {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.030683] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] privsep_osbrick.capabilities = [21] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.030836] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] privsep_osbrick.group = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.030988] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] privsep_osbrick.helper_command = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.031163] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.031328] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.031482] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] privsep_osbrick.user = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.031652] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.031807] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] nova_sys_admin.group = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.031961] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] nova_sys_admin.helper_command = None {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.032142] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.032307] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.032458] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] nova_sys_admin.user = None {{(pid=62525) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 467.032582] env[62525]: DEBUG oslo_service.service [None req-3a8f9d4f-c682-4cac-b8dc-23e9e195c044 None None] ******************************************************************************** {{(pid=62525) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 467.033099] env[62525]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 467.536756] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Getting list of instances from cluster (obj){ [ 467.536756] env[62525]: value = "domain-c8" [ 467.536756] env[62525]: _type = "ClusterComputeResource" [ 467.536756] env[62525]: } {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 467.537994] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11a1294-d397-4cd0-9756-ad9ad24501c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 467.547322] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Got total of 0 instances {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 467.547872] env[62525]: WARNING nova.virt.vmwareapi.driver [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 467.548358] env[62525]: INFO nova.virt.node [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Generated node identity bb89c0ac-8f56-43c6-9f73-fd897be63424 [ 467.548601] env[62525]: INFO nova.virt.node [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Wrote node identity bb89c0ac-8f56-43c6-9f73-fd897be63424 to /opt/stack/data/n-cpu-1/compute_id [ 468.051696] env[62525]: WARNING nova.compute.manager [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Compute nodes ['bb89c0ac-8f56-43c6-9f73-fd897be63424'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 469.057589] env[62525]: INFO nova.compute.manager [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 470.063636] env[62525]: WARNING nova.compute.manager [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
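[editor's note] The block above is the effective-configuration dump that an oslo.config-based service emits when it starts, ending with the row of asterisks logged from cfg.py. As a minimal sketch (an illustration, not taken from this log or from the Nova source), the dump is produced by the real oslo.config call ConfigOpts.log_opt_values(); the function and logger names below are hypothetical:

    # Sketch: emit a "group.option = value" DEBUG dump like the one above.
    # CONF.log_opt_values() is the real oslo.config API; dump_effective_config()
    # is an illustrative wrapper, not a Nova function.
    import logging
    from oslo_config import cfg

    CONF = cfg.CONF
    LOG = logging.getLogger(__name__)

    def dump_effective_config():
        # Walks every registered option group, logs each option and its
        # current value at DEBUG, and finishes with a line of asterisks.
        CONF.log_opt_values(LOG, logging.DEBUG)

A service typically calls this once during startup, which is why the dump appears immediately before "Starting compute node" in the log.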
[ 470.063888] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 470.064086] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 470.064254] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 470.064397] env[62525]: DEBUG nova.compute.resource_tracker [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 470.066710] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1a00d5-20e0-4a38-a268-30afa934b73a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.074710] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b46f33-871a-4503-b332-be62fa32b578 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.088946] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe46cfb-797d-4921-86b7-1ff443c66e4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.095365] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ebd2f2-2858-4688-8f4a-6333754cd364 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.123782] env[62525]: DEBUG nova.compute.resource_tracker [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181321MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 470.123925] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 470.124123] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 470.626453] env[62525]: WARNING 
nova.compute.resource_tracker [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] No compute node record for cpu-1:bb89c0ac-8f56-43c6-9f73-fd897be63424: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host bb89c0ac-8f56-43c6-9f73-fd897be63424 could not be found. [ 471.130545] env[62525]: INFO nova.compute.resource_tracker [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: bb89c0ac-8f56-43c6-9f73-fd897be63424 [ 472.638386] env[62525]: DEBUG nova.compute.resource_tracker [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 472.638702] env[62525]: DEBUG nova.compute.resource_tracker [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 472.789855] env[62525]: INFO nova.scheduler.client.report [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] [req-a6ad8d31-6dc2-43d4-8cc9-965d636eda36] Created resource provider record via placement API for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 472.806447] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58269190-7ec7-401c-b615-856bee94eea2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 472.814853] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb107484-b67f-4875-8b0f-f4a860915348 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 472.843932] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b727223-3b36-4108-8e18-f9b973a09f60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 472.850646] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce699fc-6073-4b16-a94a-c76ba040cdd9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 472.863283] env[62525]: DEBUG nova.compute.provider_tree [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 473.398886] env[62525]: DEBUG nova.scheduler.client.report [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 473.399134] env[62525]: DEBUG nova.compute.provider_tree [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 0 to 1 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 473.399274] env[62525]: DEBUG nova.compute.provider_tree [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 473.446951] env[62525]: DEBUG nova.compute.provider_tree [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 1 to 2 during operation: update_traits {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 473.951226] env[62525]: DEBUG nova.compute.resource_tracker [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 473.951584] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.827s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 473.951584] env[62525]: DEBUG nova.service [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Creating RPC server for service compute {{(pid=62525) start /opt/stack/nova/nova/service.py:186}} [ 473.965642] env[62525]: DEBUG nova.service [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] Join ServiceGroup membership for this service compute {{(pid=62525) start /opt/stack/nova/nova/service.py:203}} [ 473.965848] env[62525]: DEBUG nova.servicegroup.drivers.db [None req-b64d1e80-5058-4b15-8472-2d5d5b0116ce None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62525) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 491.970624] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_power_states {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 492.474021] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Getting list of instances from cluster (obj){ [ 492.474021] env[62525]: value = 
"domain-c8" [ 492.474021] env[62525]: _type = "ClusterComputeResource" [ 492.474021] env[62525]: } {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 492.475313] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d74004-d1d4-4320-aa05-8cedbe58dc24 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.483848] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Got total of 0 instances {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 492.484087] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 492.484412] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Getting list of instances from cluster (obj){ [ 492.484412] env[62525]: value = "domain-c8" [ 492.484412] env[62525]: _type = "ClusterComputeResource" [ 492.484412] env[62525]: } {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 492.485251] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93f8349-a26c-466e-aec2-ae6e4669b287 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.492292] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Got total of 0 instances {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 522.274294] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.274736] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.274921] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 522.275093] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 522.778618] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 522.778796] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.779050] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.779276] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.779467] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.779651] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.779832] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 522.779997] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 522.780166] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.283502] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.283997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.284183] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.284382] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 523.285648] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63581f78-bd04-4ef3-a0a2-3aa22dedfd71 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.293942] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67f8867-0cb7-400e-8902-4974793c0f3a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.307741] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a715ef4d-e43d-4f30-8704-ce80f73608ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.313977] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37790807-a43c-4928-ab73-2106a9b8c6a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.343281] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181321MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 523.343487] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.343747] 
env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.361642] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 524.361884] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 524.375654] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e764fd82-67ba-4106-9030-a56bc867ba0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.383175] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf0b8bd-da94-41ef-bcb2-929896f60251 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.415403] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c51f96-3322-4406-a84b-69ecfc04e6e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.422168] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9013e2-c412-4a83-b2e3-6c888340954d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.434868] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.938643] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 525.444642] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 525.445086] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.101s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.432283] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.432733] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.938103] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 585.938295] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 585.938412] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 586.442401] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 586.442714] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.442810] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.442939] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.443144] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.443305] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.443449] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.443590] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 586.443727] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 586.946943] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.947224] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.947393] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.947551] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 586.948502] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec781e8-38b9-4534-8a38-d4c1c0857832 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.957543] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9298849f-62a0-4e3b-b07b-7b8ea1a47d36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.971568] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914ffd9c-744d-40a5-a977-d71b3095c4c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.977716] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd929e8-4c85-4b6d-a91c-c755bb0dda0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.006373] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181332MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 587.006511] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.006694] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.024855] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 588.025103] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 588.038136] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77f6fae-d201-4f48-9df1-b69b1ba6dce3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.046416] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3f3683-2a84-46ac-bc0c-6f233d3350d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.078600] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869d968e-6b5c-421c-9f68-fa5d3cf960f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.086158] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022720fa-cf64-4157-aec9-29a198e56686 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.100106] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.602900] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 588.604265] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 588.604446] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.598s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.606529] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 648.607103] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 648.607103] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 648.607103] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 649.109656] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 649.111683] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.111683] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.111683] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.111683] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.111683] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.111683] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.111940] env[62525]: DEBUG 
nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 649.111940] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.614654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.615051] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.615183] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.615264] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 649.616217] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3169d0c2-49af-44c0-b01f-9dae7f2db622 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.624787] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b860b809-2999-48a1-9fe8-dd78d0a7e254 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.638604] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bee0cdd-1c69-4501-9754-d8c32a0f2a2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.644733] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f298bf4-001f-41cc-8c79-cc61a53b5e55 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.674048] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 649.674236] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.674407] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.692788] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 650.693038] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 650.706605] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7874c150-1bf3-4dd5-8f1d-45bba7d1e181 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.714051] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d338ea2c-5123-4a61-a354-b352c26e29e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.742998] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3eee1f-f156-4136-b041-70b4482078dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.749954] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2b673b-905e-445a-a1c1-1f661f797a4b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.762551] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.266057] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.267333] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
651.267520] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.923182] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 708.923593] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.429706] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.429706] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 709.429706] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 709.931069] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 709.931509] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.931509] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.931620] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.931719] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.931855] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.931995] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.932132] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 709.932271] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.435943] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.436298] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.436413] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.436569] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 710.437651] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7768f5-aae4-46c6-b41e-801da243fffb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.446261] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36647e7-417e-4a12-8668-2344fce7d45c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.459839] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969213a0-34f1-41ac-90b8-80dc9ead68d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.465911] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e42762f-1155-4a2e-a764-e3f8f130ab06 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.493806] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181399MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 710.493990] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.494152] 
env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.512067] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 711.512346] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 711.524982] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85af244-b9ba-4421-b075-81e82e2484d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.533650] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7434951a-f800-4a28-96dc-90c44b5f30a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.564110] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092ae573-95b3-4775-84f3-58c0acac93fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.571217] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0ea83a-c79a-48a4-8b71-78b82879f229 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.584222] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.087821] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.089161] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 712.089346] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.266589] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.266990] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 762.773021] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] There are 0 instances to clean {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 762.773021] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.773021] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances with incomplete migration {{(pid=62525) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 763.275971] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.777811] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.778202] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 764.778202] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 766.266980] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.267433] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 766.267433] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 766.770139] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 766.770385] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 766.770540] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 767.274479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.274917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.274989] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.275136] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 767.276036] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0433951-8dbf-4ac7-9f21-fcaaa800c257 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.284395] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d672ab-f3ab-4b4f-bed7-d14018c8e7eb {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.297925] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9083b4-825a-46c5-8ac1-042ff56d5a44 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.304658] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b480675-1a95-4efa-8102-c08668b49fa7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.332522] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181399MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 767.332690] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.332831] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.349859] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 768.350124] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 768.362289] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e4d762-74cf-434e-96ad-c17dcc8be1ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.369633] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec538c7-a8d4-4436-80e9-4ce74872f9ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.398315] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658adfac-0651-420f-a086-718c84af00d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.404979] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef73b883-c8dd-4eea-929f-8ec8ab57f555 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.417538] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] 
Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.920617] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.921869] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 768.922057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.589s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.418566] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 769.418968] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 769.418968] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 769.419161] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.266759] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.267161] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 826.268309] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.262242] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.265854] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 828.266179] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 828.266551] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 828.266551] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 828.769532] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 828.769786] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.272572] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.272835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.273014] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.273140] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 829.274085] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828a4f9b-a12e-4d3c-aa2f-096e229415b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.282063] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37883817-5510-46b4-bb9d-6db6f0fb279f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.296668] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798fdfba-594b-4bed-8397-271f9343d7b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.302780] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6871263d-4ab6-46bd-8581-024fb8771038 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.330888] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 829.331034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.331213] 
env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.364631] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 830.364869] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 830.382446] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 830.396184] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 830.396358] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.407199] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 830.421612] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 830.432910] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cbda7010-8787-4734-9bd4-6388b9bc285a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.439787] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ce44bc-cb6d-42fc-9030-47d1d3e2dfd3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.469237] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e52574-1621-40cb-bd38-9b5455f8cab6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.475668] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3851949-d1f9-48c4-a531-ee63fed850d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.488009] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.991547] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.992893] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 830.993131] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.662s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.489828] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.995016] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.995364] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.995605] env[62525]: DEBUG oslo_service.periodic_task 
[None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.267254] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.267691] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 887.266279] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 888.266557] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 889.262025] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 890.265845] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 890.266332] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 890.266332] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 890.769608] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 890.769847] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 890.770030] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 890.770190] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 891.273828] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.274187] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.274229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.274441] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 891.275376] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc575dc6-89db-444e-89a9-1afc8becbb5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.283417] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212608c2-5ea8-41bc-b87d-acd4efc7e409 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.297677] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8053c227-70fb-4a7c-92ee-01b3e4933112 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.303890] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0665b29f-0fbd-4530-baf0-4c9340211b5b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.332208] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181391MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 891.332386] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.332558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.351022] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 892.351334] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 892.363467] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592a91ef-7960-48e3-87fb-150ef7be2ef6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.371028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d869d3-476a-42f2-8db7-895479384ebd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.401368] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9713ad1c-270b-40f3-b06c-5fcbd3ecbac5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.408750] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731b26f0-7334-491b-a75f-04c962fff62d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.421719] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.924987] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 892.926285] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 892.926466] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.422884] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.266786] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.266786] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 948.266988] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.267303] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.271977] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 950.271977] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 950.770421] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 950.770637] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.770798] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 951.273689] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.274152] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.274215] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.274323] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 951.275250] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e81589-e697-40d7-a2e7-0428832d62ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.283853] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be164552-f88d-4396-a5ec-4a9fcd6e5d0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.297419] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4bce2b-36fc-4c5e-a2f6-28df6ac4badd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.303476] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5097c7a2-c9ef-4e51-9d28-0a94732a10c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.332304] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181394MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 951.332443] env[62525]: DEBUG 
oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.332620] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.350407] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 952.350683] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 952.362680] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3296740-8348-4feb-bb7f-898d11fa2d31 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.370199] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c32ae7-9d8f-465f-a9ed-e5532f24977c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.399644] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1cc0f9-42a5-40d2-9d57-6e1c7a3abfee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.406484] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d07741a-bcdb-4ec5-b5fe-cb744e40b224 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.419629] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.922667] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.923978] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 952.924155] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.419631] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.420115] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.420272] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.420490] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.262579] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.267323] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.267678] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.267678] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1011.266971] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1012.265854] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1012.266049] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1012.266160] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1012.769498] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1012.769755] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1013.273359] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.273599] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.273744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.273914] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1013.274894] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7636e2a-4602-4fe2-8d16-e9d6e70ddee6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.283224] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdb075c-e2b2-4564-bf0c-058aba813eb0 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.297315] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97120925-f835-44d8-8757-cf3eaa2ab899 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.303474] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c64e7a7-63ea-4885-b372-64075f706358 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.332615] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181398MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1013.332752] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.332928] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.350382] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1014.350647] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1014.363750] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26913eca-ef21-438d-b3f0-8f7a81f1aac5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.371896] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f373895b-e3a7-4cf5-9547-654600fb6cbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.401189] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3791a95f-0665-44e6-9718-be8183825c26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.408856] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033d7b97-c052-4554-b67f-7b8ef715421b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.422448] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None 
None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.925363] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1014.926729] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1014.926945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.423597] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.424026] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.424026] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.424160] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.268238] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.268664] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances with incomplete migration {{(pid=62525) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1069.769580] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.265831] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.266071] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1071.266665] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1071.267089] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1071.770022] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] There are 0 instances to clean {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1072.770016] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1073.273770] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.274036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.274228] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.274393] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1073.275311] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c296744-972a-43cc-b519-3cf68f77df96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.283553] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31290c38-a7b8-44b6-87eb-c303da8eb8e5 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.297213] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862c5a74-8a55-4aa7-bfe9-6eb9e00eaa41 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.303408] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e147561-2a6c-46ab-b6e7-83eb93a60573 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.332225] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1073.332349] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.332535] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.351016] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1074.351253] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1074.364560] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f37e9d9-eb3e-4398-9327-92cebcbf4ef9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.372465] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a678861-3d68-437f-abed-4e51d62529fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.401398] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9dd9f1-159c-4615-986e-2079e92e4851 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.408784] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73579e5-4218-47e6-83e4-4aa5a542b2b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.421218] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None 
None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.924650] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1074.925900] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1074.926123] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.926345] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.919684] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.920072] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.920072] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1075.920145] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1076.422641] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1076.422876] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.423334] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.423857] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.423857] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.766053] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.974679] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_power_states {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.477194] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Getting list of instances from cluster (obj){ [ 1092.477194] env[62525]: value = "domain-c8" [ 1092.477194] env[62525]: _type = "ClusterComputeResource" [ 1092.477194] env[62525]: } {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1092.478238] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81e91ce-bea0-45c4-b966-18dba17cf952 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.486912] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Got total of 0 instances {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1130.779871] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.266447] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.266831] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1132.266831] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.769821] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.770060] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.770227] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.770406] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1132.771289] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733ad6bd-8620-4daf-a3cc-8c269e17a4a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.779682] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb765ae-65ad-46b9-a8bf-3a88b01817c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.793278] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e12239f-93a3-4a21-bb66-d6b166600045 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.799346] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09230e81-4cc8-4638-8083-5d22259ae76a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.828389] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181399MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1132.828529] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1132.828714] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.945717] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1133.945996] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1133.961798] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1133.973334] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1133.973535] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1133.982746] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1133.997225] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1134.008283] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-50a4c258-a2fc-41d1-9f2e-8f7907af5e01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.016211] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13f4803-1df9-4443-b4c4-dd2f5a09f5fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.044881] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10e52fe-e61b-4fe7-9c1b-e351426670da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.051602] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09012252-fe78-41e8-9aaa-7efae40961ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.064729] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.568109] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1134.569370] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1134.569574] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.741s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.565367] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.565750] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.565750] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1135.565916] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] 
Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1136.071612] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1136.071884] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.072028] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.266516] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.266760] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.267829] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.267655] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.770806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.771252] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.771252] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.771426] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1192.772346] 
env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3886b4-0eee-43b6-9632-3b78c8d0d78a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.780330] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d746791-b793-4a7c-8d37-3e6385924db7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.794188] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfecac24-fa49-4cab-8cd2-323f5f252a3b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.800272] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625f3469-70ed-4c51-aaca-f8b112ab1413 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.830071] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1192.830212] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.830402] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.848064] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1193.848318] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1193.860892] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d468ac-fb6e-49bd-8dce-393adcae83a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.868483] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bce7323-475b-4497-a3d0-8cdf7cecc02a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.898106] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6376f3d3-fe79-4df1-acd8-8751a0992a98 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.906441] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d79b66d-4013-43ee-863f-5fcc3e2f9cda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.919395] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.422564] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1194.423881] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1194.424080] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.418630] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.418899] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.419018] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1195.419149] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1195.922396] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1195.922633] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.922762] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.922870] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1196.267045] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.267045] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.267045] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.262197] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.269031] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.269031] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.774897] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.775143] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.775311] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.775482] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1252.776439] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646b17ab-0f70-4121-a84f-fce9cf8fd6b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.786460] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2661f4c-3ac6-4c75-85d8-acb7ebd6eed5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.803937] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628327c6-e8dc-4332-99c2-d7e4831b6f15 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.812067] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91be430-1cda-4473-8c6c-f935e55d6cc4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.844095] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181405MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1252.844095] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.844095] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.869507] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1253.869507] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1253.884087] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddc8697-4491-4094-ac29-482fd44ff70f {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.893289] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9ababb-bae8-427a-99ba-7eac26ed4095 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.925529] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a919d4e0-c4f1-412b-b928-a3bb8798adb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.933475] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e8f98c-2dff-4719-98c9-20520b8654fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.946914] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.450055] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1254.451496] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1254.451677] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.608s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.135278] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "5c9ca73a-bc48-4a75-89c8-03def719e488" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.135278] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.449412] env[62525]: DEBUG oslo_service.periodic_task [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.449618] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.449762] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1255.449878] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1255.638895] env[62525]: DEBUG nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1255.953111] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1255.953111] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.953111] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1256.191559] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.192154] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.193573] env[62525]: INFO nova.compute.claims [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1257.246245] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0572fe-8997-47ba-8ed2-ade6c6338eee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.256454] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e788466-3f3a-4ee4-8972-efb7e5468536 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.294646] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.296253] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477145c6-fd58-488d-9e8a-f890dad671d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.304799] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee123af9-f320-4e26-833d-87dfabac22ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.321360] env[62525]: DEBUG nova.compute.provider_tree [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.824955] env[62525]: DEBUG nova.scheduler.client.report [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1258.266897] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1258.266897] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1258.266897] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1258.334257] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.334257] env[62525]: DEBUG nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1258.843607] env[62525]: DEBUG nova.compute.utils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1258.845656] env[62525]: DEBUG nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1258.846058] env[62525]: DEBUG nova.network.neutron [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1259.352980] env[62525]: DEBUG nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1260.372266] env[62525]: DEBUG nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1260.669424] env[62525]: DEBUG nova.policy [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcacd9bdc246464082b40b55a9684a6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b52b70f6ed86437ea166c9b27682dcf4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1260.927902] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1260.927902] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1260.927902] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1260.928211] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1260.928211] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1260.928211] env[62525]: DEBUG nova.virt.hardware [None 
req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1260.928211] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1260.928211] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1260.929155] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1260.929323] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1260.929627] env[62525]: DEBUG nova.virt.hardware [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1260.930453] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a18a082-29e2-4766-b7df-d57924aaeb5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.940839] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487cf805-fddb-4d77-822a-e640bda75cf5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.955651] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07abf00-a2aa-4136-a5d9-bd74aaf75076 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.493412] env[62525]: DEBUG nova.network.neutron [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Successfully created port: b5fcb1bb-8ada-40c9-8436-fb190e5aef33 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1263.731997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "5bffec39-0b09-49a0-a862-560720db45cd" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.732524] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "5bffec39-0b09-49a0-a862-560720db45cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.235376] env[62525]: DEBUG nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1264.642164] env[62525]: DEBUG nova.network.neutron [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Successfully updated port: b5fcb1bb-8ada-40c9-8436-fb190e5aef33 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1264.776433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.776433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.777984] env[62525]: INFO nova.compute.claims [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1265.147176] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "refresh_cache-5c9ca73a-bc48-4a75-89c8-03def719e488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1265.148060] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquired lock "refresh_cache-5c9ca73a-bc48-4a75-89c8-03def719e488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.148060] env[62525]: DEBUG nova.network.neutron [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 
tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1265.743170] env[62525]: DEBUG nova.network.neutron [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1265.854421] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51761d0-e205-411e-88b4-9f502e838dbe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.864749] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7da525-00a5-4398-ae26-99b8e17921c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.909160] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc3e3b5-fab7-4002-ae73-568304179f94 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.916590] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864f8cf8-214d-46a9-a0df-1d30713a914f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.931429] env[62525]: DEBUG nova.compute.provider_tree [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.144203] env[62525]: DEBUG nova.compute.manager [req-b85a43a4-8462-4b00-96c0-384216531b5e req-09c2d37b-7b4b-4282-97be-2a8576bd9d9c service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Received event network-vif-plugged-b5fcb1bb-8ada-40c9-8436-fb190e5aef33 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1266.144799] env[62525]: DEBUG oslo_concurrency.lockutils [req-b85a43a4-8462-4b00-96c0-384216531b5e req-09c2d37b-7b4b-4282-97be-2a8576bd9d9c service nova] Acquiring lock "5c9ca73a-bc48-4a75-89c8-03def719e488-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.145173] env[62525]: DEBUG oslo_concurrency.lockutils [req-b85a43a4-8462-4b00-96c0-384216531b5e req-09c2d37b-7b4b-4282-97be-2a8576bd9d9c service nova] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.145451] env[62525]: DEBUG oslo_concurrency.lockutils [req-b85a43a4-8462-4b00-96c0-384216531b5e req-09c2d37b-7b4b-4282-97be-2a8576bd9d9c service nova] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.145451] env[62525]: DEBUG nova.compute.manager [req-b85a43a4-8462-4b00-96c0-384216531b5e req-09c2d37b-7b4b-4282-97be-2a8576bd9d9c service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] No waiting events found dispatching network-vif-plugged-b5fcb1bb-8ada-40c9-8436-fb190e5aef33 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1266.145608] env[62525]: WARNING nova.compute.manager [req-b85a43a4-8462-4b00-96c0-384216531b5e req-09c2d37b-7b4b-4282-97be-2a8576bd9d9c service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Received unexpected event network-vif-plugged-b5fcb1bb-8ada-40c9-8436-fb190e5aef33 for instance with vm_state building and task_state spawning. [ 1266.212543] env[62525]: DEBUG nova.network.neutron [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Updating instance_info_cache with network_info: [{"id": "b5fcb1bb-8ada-40c9-8436-fb190e5aef33", "address": "fa:16:3e:5e:08:5e", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5fcb1bb-8a", "ovs_interfaceid": "b5fcb1bb-8ada-40c9-8436-fb190e5aef33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.434855] env[62525]: DEBUG nova.scheduler.client.report [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1266.575210] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.575210] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.716112] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Releasing lock "refresh_cache-5c9ca73a-bc48-4a75-89c8-03def719e488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.717086] env[62525]: DEBUG nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Instance network_info: |[{"id": "b5fcb1bb-8ada-40c9-8436-fb190e5aef33", "address": "fa:16:3e:5e:08:5e", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5fcb1bb-8a", "ovs_interfaceid": "b5fcb1bb-8ada-40c9-8436-fb190e5aef33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1266.717184] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:08:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5fcb1bb-8ada-40c9-8436-fb190e5aef33', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1266.735075] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1266.735843] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e52c0a3-99e7-4e11-b79e-9ba9211da295 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.752500] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Created folder: OpenStack in parent group-v4. [ 1266.752500] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Creating folder: Project (b52b70f6ed86437ea166c9b27682dcf4). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1266.752500] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1c78a5c-770a-44d6-a938-dfab03c882ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.761458] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Created folder: Project (b52b70f6ed86437ea166c9b27682dcf4) in parent group-v369553. [ 1266.763018] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Creating folder: Instances. Parent ref: group-v369554. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1266.763018] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-783e2494-2cc2-4341-a2db-f8ae5b4984dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.773864] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Created folder: Instances in parent group-v369554. [ 1266.774367] env[62525]: DEBUG oslo.service.loopingcall [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1266.774718] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1266.775040] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3d86f21-6a0d-4aa5-8877-40a7e6af4ad0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.796866] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1266.796866] env[62525]: value = "task-1780671" [ 1266.796866] env[62525]: _type = "Task" [ 1266.796866] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.806616] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780671, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.941690] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.165s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.942319] env[62525]: DEBUG nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1267.078088] env[62525]: DEBUG nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1267.309968] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780671, 'name': CreateVM_Task, 'duration_secs': 0.412192} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.310101] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1267.337846] env[62525]: DEBUG oslo_vmware.service [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e323625-2e84-46c3-9641-b8a224813536 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.344504] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.344827] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.345434] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1267.345708] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b67b3cb-9f11-4d56-8a11-e12e8e685b4b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.350471] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1267.350471] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5278f060-cc76-2c2c-5a86-3b45efedf23f" [ 1267.350471] env[62525]: _type = "Task" [ 1267.350471] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.358445] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5278f060-cc76-2c2c-5a86-3b45efedf23f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.448165] env[62525]: DEBUG nova.compute.utils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1267.450497] env[62525]: DEBUG nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1267.450727] env[62525]: DEBUG nova.network.neutron [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1267.605881] env[62525]: DEBUG nova.policy [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd5e623206af4f9cbf552b6384a1a479', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2025cfd56fe442319ad1b3b7183da6a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1267.612191] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.612191] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.613215] env[62525]: INFO nova.compute.claims [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1267.864519] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1267.864930] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1267.865300] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.865597] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.866188] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1267.866557] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-275ca277-1941-40ba-bbe0-a38bba53c3fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.889806] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1267.890030] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1267.892358] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f679bf5-32da-4258-a8ab-bd55868905be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.901907] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c65ef5e-d1b5-4daa-b4ca-8dd6bac2664e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.907832] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1267.907832] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529e5191-5c83-f787-4f37-3e6f9ef994ae" [ 1267.907832] env[62525]: _type = "Task" [ 1267.907832] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.916407] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529e5191-5c83-f787-4f37-3e6f9ef994ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.953596] env[62525]: DEBUG nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1268.357020] env[62525]: DEBUG nova.network.neutron [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Successfully created port: 4cdbce67-8f63-4bbb-9079-0e1a0038ee8d {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1268.422181] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Preparing fetch location {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1268.422603] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Creating directory with path [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1268.423081] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb386683-d8ee-4e88-be05-8b108296e876 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.446212] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Created directory with path [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1268.446212] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Fetch image to [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1268.446212] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Downloading image file data a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 to [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk on the data store datastore1 {{(pid=62525) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1268.449837] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdd5786-70bd-49ba-8bdb-163b1789c0a3 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.457196] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61b21d0-ad51-4742-a5aa-acb3789663d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.471975] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a741075f-8be3-4fc9-928b-0e1b24b94959 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.511575] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6779f9b4-06f3-411a-9a79-f34f3bc1117a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.518531] env[62525]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6d24916a-a934-46fc-8971-16d31f19424e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.613178] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Downloading image file data a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 to the data store datastore1 {{(pid=62525) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1268.724655] env[62525]: DEBUG oslo_vmware.rw_handles [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62525) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1268.800393] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecf201e-f783-415d-96aa-e5eee91cabff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.814894] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e62157-d4ed-494d-a504-5dc27436c20e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.855818] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63308695-6189-44de-acf1-f7e916afa02b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.864564] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9190f42-3008-4689-93e7-fe4f57190a38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.889116] env[62525]: DEBUG nova.compute.provider_tree [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.895384] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "c7603ce8-8471-4813-9faf-3667a205893c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.895684] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "c7603ce8-8471-4813-9faf-3667a205893c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.966520] env[62525]: DEBUG nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1269.009249] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1269.009454] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1269.009571] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.009720] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1269.009857] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.009992] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1269.010213] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1269.010359] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1269.010524] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1269.010787] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1269.010903] env[62525]: DEBUG nova.virt.hardware [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1269.011740] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597e9bbe-4c12-46ea-b1d6-7c66cc263d8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.021469] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d95066-e2fb-4293-9ef6-09a9f06d4043 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.391820] env[62525]: DEBUG nova.scheduler.client.report [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1269.401943] env[62525]: DEBUG oslo_vmware.rw_handles [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Completed reading data from the image iterator. {{(pid=62525) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1269.401943] env[62525]: DEBUG oslo_vmware.rw_handles [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1269.402128] env[62525]: DEBUG nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1269.564155] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Downloaded image file data a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 to vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk on the data store datastore1 {{(pid=62525) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1269.568017] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Caching image {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1269.568017] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Copying Virtual Disk [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk to [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1269.568017] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33cf878b-c00d-41cc-8306-67da2f8d1d48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.575659] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1269.575659] env[62525]: value = "task-1780672" [ 1269.575659] env[62525]: _type = "Task" [ 1269.575659] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.586202] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780672, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.909181] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.909181] env[62525]: DEBUG nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1269.943990] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.944211] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.949726] env[62525]: INFO nova.compute.claims [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1270.091460] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780672, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.418525] env[62525]: DEBUG nova.compute.utils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1270.423994] env[62525]: DEBUG nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1270.423994] env[62525]: DEBUG nova.network.neutron [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1270.462376] env[62525]: DEBUG nova.compute.manager [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Received event network-changed-b5fcb1bb-8ada-40c9-8436-fb190e5aef33 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1270.462905] env[62525]: DEBUG nova.compute.manager [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Refreshing instance network info cache due to event network-changed-b5fcb1bb-8ada-40c9-8436-fb190e5aef33. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1270.462905] env[62525]: DEBUG oslo_concurrency.lockutils [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] Acquiring lock "refresh_cache-5c9ca73a-bc48-4a75-89c8-03def719e488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1270.462905] env[62525]: DEBUG oslo_concurrency.lockutils [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] Acquired lock "refresh_cache-5c9ca73a-bc48-4a75-89c8-03def719e488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.463090] env[62525]: DEBUG nova.network.neutron [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Refreshing network info cache for port b5fcb1bb-8ada-40c9-8436-fb190e5aef33 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1270.575597] env[62525]: DEBUG nova.network.neutron [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Successfully updated port: 4cdbce67-8f63-4bbb-9079-0e1a0038ee8d {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1270.589858] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780672, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.842937} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.591805] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Copied Virtual Disk [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk to [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1270.592020] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Deleting the datastore file [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/tmp-sparse.vmdk {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1270.592314] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8960e2d-56e1-45e4-a46c-41a39a9155e6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.603079] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1270.603079] env[62525]: value = "task-1780673" [ 1270.603079] env[62525]: _type = "Task" [ 1270.603079] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.611908] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780673, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.690953] env[62525]: DEBUG nova.policy [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1270.811861] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.812220] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.927451] env[62525]: DEBUG nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1271.071282] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ceb72c-c4ec-4420-9cf3-1a5ca5ab5267 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.081268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "refresh_cache-5bffec39-0b09-49a0-a862-560720db45cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1271.081433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquired lock "refresh_cache-5bffec39-0b09-49a0-a862-560720db45cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.081582] env[62525]: DEBUG nova.network.neutron [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1271.084663] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df0c668-69d0-49ee-8337-3432187ae8af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.132688] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b51570d-8140-4ee3-8515-86712f1ac031 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.141697] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043047} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.143470] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1271.143691] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Moving file from [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 to [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36. 
{{(pid=62525) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1271.144012] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b02f741a-f452-44f2-9153-e55ccca1a211 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.152283] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2f4c43-26d0-4772-b574-a8730f9a7eaa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.156206] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1271.156206] env[62525]: value = "task-1780674" [ 1271.156206] env[62525]: _type = "Task" [ 1271.156206] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.172855] env[62525]: DEBUG nova.compute.provider_tree [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.179372] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780674, 'name': MoveDatastoreFile_Task} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.179605] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] File moved {{(pid=62525) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1271.180371] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Cleaning up location [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1271.180371] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Deleting the datastore file [datastore1] vmware_temp/a8785254-cf3f-4a5b-8be4-268d6147475c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1271.180371] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-562765bb-dc90-4773-8515-94678e6c3da0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.187357] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1271.187357] env[62525]: value = "task-1780675" [ 1271.187357] env[62525]: _type = "Task" [ 1271.187357] env[62525]: } to 
complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.195457] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.314192] env[62525]: DEBUG nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1271.556061] env[62525]: DEBUG nova.network.neutron [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Updated VIF entry in instance network info cache for port b5fcb1bb-8ada-40c9-8436-fb190e5aef33. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1271.556061] env[62525]: DEBUG nova.network.neutron [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Updating instance_info_cache with network_info: [{"id": "b5fcb1bb-8ada-40c9-8436-fb190e5aef33", "address": "fa:16:3e:5e:08:5e", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5fcb1bb-8a", "ovs_interfaceid": "b5fcb1bb-8ada-40c9-8436-fb190e5aef33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.643436] env[62525]: DEBUG nova.network.neutron [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1271.676992] env[62525]: DEBUG nova.scheduler.client.report [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1271.698391] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025371} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.698807] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1271.699766] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c9f4452-d8e0-4709-be8b-1ab1653f1a60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.705916] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1271.705916] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ec4328-e331-7507-cad7-8e3b2782425e" [ 1271.705916] env[62525]: _type = "Task" [ 1271.705916] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.714750] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ec4328-e331-7507-cad7-8e3b2782425e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.843257] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.895759] env[62525]: DEBUG nova.network.neutron [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Successfully created port: 9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1271.942332] env[62525]: DEBUG nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1271.956675] env[62525]: DEBUG nova.network.neutron [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Updating instance_info_cache with network_info: [{"id": "4cdbce67-8f63-4bbb-9079-0e1a0038ee8d", "address": "fa:16:3e:7e:3f:e4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbce67-8f", "ovs_interfaceid": "4cdbce67-8f63-4bbb-9079-0e1a0038ee8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.969581] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1271.970120] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1271.970363] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1271.970688] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1271.970800] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1271.970998] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1271.971273] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1271.971502] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1271.971689] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1271.971906] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 1271.972182] env[62525]: DEBUG nova.virt.hardware [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1271.973560] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0015ac02-f0f3-413b-816c-d4933d7ad596 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.985967] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06250a1-9dc5-433a-852c-ab89f99f8d6b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.057885] env[62525]: DEBUG oslo_concurrency.lockutils [req-328257b7-cfa2-4d54-9d4f-01b1c60f985a req-681d239a-af24-4655-8b32-18cde78de395 service nova] Releasing lock "refresh_cache-5c9ca73a-bc48-4a75-89c8-03def719e488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.184145] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.184708] env[62525]: DEBUG nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1272.188225] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.345s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.189339] env[62525]: INFO nova.compute.claims [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.220420] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ec4328-e331-7507-cad7-8e3b2782425e, 'name': SearchDatastore_Task, 'duration_secs': 0.009262} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.220717] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.220972] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5c9ca73a-bc48-4a75-89c8-03def719e488/5c9ca73a-bc48-4a75-89c8-03def719e488.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1272.221277] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ec218aa-1a4b-4f98-85a6-df4c0dc045e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.229460] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1272.229460] env[62525]: value = "task-1780676" [ 1272.229460] env[62525]: _type = "Task" [ 1272.229460] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.244585] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780676, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.463894] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Releasing lock "refresh_cache-5bffec39-0b09-49a0-a862-560720db45cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.463894] env[62525]: DEBUG nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Instance network_info: |[{"id": "4cdbce67-8f63-4bbb-9079-0e1a0038ee8d", "address": "fa:16:3e:7e:3f:e4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbce67-8f", "ovs_interfaceid": "4cdbce67-8f63-4bbb-9079-0e1a0038ee8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1272.464038] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:3f:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cdbce67-8f63-4bbb-9079-0e1a0038ee8d', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1272.475087] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Creating folder: Project (2025cfd56fe442319ad1b3b7183da6a1). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1272.475087] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-afc9080d-c56f-41b6-a9cc-09bc7603c4ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.490320] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Created folder: Project (2025cfd56fe442319ad1b3b7183da6a1) in parent group-v369553. [ 1272.490320] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Creating folder: Instances. Parent ref: group-v369557. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1272.490320] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71f12ca9-67d2-426d-a605-27c7045ef6f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.498880] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Created folder: Instances in parent group-v369557. [ 1272.499160] env[62525]: DEBUG oslo.service.loopingcall [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1272.499789] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1272.499789] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a64c2f4-6fd5-43e4-93bc-e9165ca37afc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.524202] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1272.524202] env[62525]: value = "task-1780679" [ 1272.524202] env[62525]: _type = "Task" [ 1272.524202] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.530917] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780679, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.694326] env[62525]: DEBUG nova.compute.utils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1272.698028] env[62525]: DEBUG nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1272.698028] env[62525]: DEBUG nova.network.neutron [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1272.742744] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780676, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.773172] env[62525]: DEBUG nova.policy [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9806f4b5cc1d4ebcb79c98b694e97591', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '689156401b324ce3a5021a6079c5b27b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1272.890914] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.891464] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.035033] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780679, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.049285] env[62525]: DEBUG nova.network.neutron [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Successfully created port: 4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1273.198427] env[62525]: DEBUG nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1273.246060] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780676, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608994} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.247309] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5c9ca73a-bc48-4a75-89c8-03def719e488/5c9ca73a-bc48-4a75-89c8-03def719e488.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1273.247422] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1273.248028] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09e2dbc3-6dfc-4e15-b9c0-8bf571ffd87c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.257813] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1273.257813] env[62525]: value = "task-1780680" [ 1273.257813] env[62525]: _type = "Task" [ 1273.257813] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.268913] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780680, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.329937] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f0a808-7ce7-4f7a-8b0b-a5b42c1bf4d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.339161] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd0c077-62a9-421a-9bbe-613de9d3935f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.379025] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97432b16-93c1-47b0-9e9e-1cd4a50b32a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.384834] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2099fa2-3c79-46a0-8288-526e946c8b78 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.408915] env[62525]: DEBUG nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1273.412500] env[62525]: DEBUG nova.compute.provider_tree [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.542099] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780679, 'name': CreateVM_Task, 'duration_secs': 0.749783} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.542252] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1273.543352] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.543352] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.543433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1273.543661] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16dfd0e1-0500-40e8-b1b4-311430ece8a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.550436] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1273.550436] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52df837e-5c10-4ce9-6526-7ab8158cf563" [ 1273.550436] env[62525]: _type = "Task" [ 1273.550436] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.560285] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52df837e-5c10-4ce9-6526-7ab8158cf563, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.771287] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780680, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07399} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.771551] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1273.772453] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5edcd74-6fb1-4aea-9599-8757484ff466 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.801821] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 5c9ca73a-bc48-4a75-89c8-03def719e488/5c9ca73a-bc48-4a75-89c8-03def719e488.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1273.801992] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9da5b22-7153-433f-8f4d-a86e85e130da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.827020] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1273.827020] env[62525]: value = "task-1780681" [ 1273.827020] env[62525]: _type = "Task" [ 1273.827020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.838704] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780681, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.920894] env[62525]: DEBUG nova.scheduler.client.report [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1273.948272] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.225507] env[62525]: DEBUG nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1274.228535] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "deef59c8-f710-434d-bddc-f63bb3d518b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.228786] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.248718] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52df837e-5c10-4ce9-6526-7ab8158cf563, 'name': SearchDatastore_Task, 'duration_secs': 0.009481} completed successfully. 
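The "Inventory has not changed for provider ... based on inventory data" records above show the scheduler report client diffing the locally computed inventory (VCPU, MEMORY_MB, DISK_GB, each with totals, reserved amounts, units and allocation ratios) against what it last reported to placement, and skipping the update when nothing differs. The following is a minimal, self-contained sketch of that comparison only; it is not Nova's implementation, and the helper name is invented.

```python
# Minimal sketch of the "inventory has not changed" check suggested by the
# scheduler report-client records above. Not Nova's code; illustrative only.
def inventory_changed(current: dict, proposed: dict) -> bool:
    """Return True if the proposed inventory differs from the current one."""
    if current.keys() != proposed.keys():
        return True
    # Compare each resource class field-by-field (total, reserved, ratios, ...).
    return any(current[rc] != proposed[rc] for rc in proposed)

current = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
}

# Identical data -> no inventory update needs to be sent to placement.
assert not inventory_changed(current, dict(current))
```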
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.249143] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1274.249143] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1274.249777] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.249777] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.249777] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1274.249974] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a232b187-2ea2-4260-a274-b223bb08d43b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.263418] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1274.263810] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Folder [datastore1] devstack-image-cache_base created. 
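The Acquiring/Acquired/Releasing lock records for "[datastore1] devstack-image-cache_base/<image-id>" above come from oslo.concurrency, used here to ensure only one build at a time checks and populates a given cached image before the datastore search and directory creation proceed. A minimal sketch of that pattern, assuming only the public lockutils.lock() context manager; the function and its body are illustrative, not Nova's code.

```python
from oslo_concurrency import lockutils

def ensure_cached_image(datastore, image_id):
    """Illustrative stand-in for the image-cache population step.

    The lock name mirrors the "[datastore1] devstack-image-cache_base/<image>"
    names in the records above; lockutils emits the Acquiring/Acquired/
    Releasing DEBUG lines when the context manager is entered and exited.
    """
    lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    with lockutils.lock(lock_name):
        # Inside the lock: check the datastore (SearchDatastore_Task in the
        # log) and copy the image into the cache only if it is missing.
        pass
```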
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1274.264366] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f759d92-04a6-47c3-bffd-1712035597b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.272671] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1274.272671] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c3c382-643c-2b46-8c0c-fa2d5d5563c9" [ 1274.272671] env[62525]: _type = "Task" [ 1274.272671] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.284584] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c3c382-643c-2b46-8c0c-fa2d5d5563c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.287755] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1274.287999] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1274.288171] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1274.288348] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1274.288485] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 
tempest-ServersTestManualDisk-1807868986-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1274.288619] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1274.288812] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1274.288958] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1274.289123] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1274.289274] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1274.289434] env[62525]: DEBUG nova.virt.hardware [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1274.290220] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ffbf33-520f-433b-a0e8-3d20efd719a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.298303] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a9e424-aec4-4d78-b24e-4c94b5874f20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.336051] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780681, 'name': ReconfigVM_Task, 'duration_secs': 0.304035} completed successfully. 
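The nova.virt.hardware records above trace CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, and the only topology whose product equals one vCPU is 1 socket x 1 core x 1 thread. The toy enumeration below reproduces that idea in plain Python; it is an illustration of the concept, not Nova's implementation.

```python
# Illustrative enumeration of CPU topologies for a given vCPU count, mirroring
# the "Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies
# [VirtCPUTopology(cores=1,sockets=1,threads=1)]" records above. Not Nova's code.
from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(Topology(s, c, t))
    return found

print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
print(possible_topologies(4))   # six orderings whose product is 4
```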
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.336637] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 5c9ca73a-bc48-4a75-89c8-03def719e488/5c9ca73a-bc48-4a75-89c8-03def719e488.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1274.337031] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d16f95a5-f284-4d33-95b6-61f807ad9d96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.343528] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1274.343528] env[62525]: value = "task-1780682" [ 1274.343528] env[62525]: _type = "Task" [ 1274.343528] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.351567] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780682, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.427654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.428203] env[62525]: DEBUG nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1274.434033] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.486s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.436571] env[62525]: INFO nova.compute.claims [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1274.736912] env[62525]: DEBUG nova.network.neutron [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Successfully updated port: 4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1274.740494] env[62525]: DEBUG nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1274.785121] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c3c382-643c-2b46-8c0c-fa2d5d5563c9, 'name': SearchDatastore_Task, 'duration_secs': 0.010505} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.785121] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61f5876d-ffa6-4851-a1ca-f4110220ea4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.790292] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1274.790292] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522abc14-4347-11bc-a3e8-69d8140daf85" [ 1274.790292] env[62525]: _type = "Task" [ 1274.790292] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.799662] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522abc14-4347-11bc-a3e8-69d8140daf85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.857861] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780682, 'name': Rename_Task, 'duration_secs': 0.146738} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.857861] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1274.857861] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd6672f8-548b-43a8-bf54-aea13237eeb3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.867353] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1274.867353] env[62525]: value = "task-1780683" [ 1274.867353] env[62525]: _type = "Task" [ 1274.867353] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.876426] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780683, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.935350] env[62525]: DEBUG nova.compute.utils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1274.936932] env[62525]: DEBUG nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1274.937609] env[62525]: DEBUG nova.network.neutron [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1274.966143] env[62525]: DEBUG nova.compute.manager [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Received event network-vif-plugged-4cdbce67-8f63-4bbb-9079-0e1a0038ee8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1274.971715] env[62525]: DEBUG oslo_concurrency.lockutils [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] Acquiring lock "5bffec39-0b09-49a0-a862-560720db45cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.971715] env[62525]: DEBUG oslo_concurrency.lockutils [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] Lock "5bffec39-0b09-49a0-a862-560720db45cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.971715] env[62525]: DEBUG oslo_concurrency.lockutils [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] Lock "5bffec39-0b09-49a0-a862-560720db45cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.971715] env[62525]: DEBUG nova.compute.manager [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] No waiting events found dispatching network-vif-plugged-4cdbce67-8f63-4bbb-9079-0e1a0038ee8d {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1274.971715] env[62525]: WARNING nova.compute.manager [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Received unexpected event network-vif-plugged-4cdbce67-8f63-4bbb-9079-0e1a0038ee8d for instance with vm_state building and task_state spawning. [ 1274.971869] env[62525]: DEBUG nova.compute.manager [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Received event network-changed-4cdbce67-8f63-4bbb-9079-0e1a0038ee8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1274.971869] env[62525]: DEBUG nova.compute.manager [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Refreshing instance network info cache due to event network-changed-4cdbce67-8f63-4bbb-9079-0e1a0038ee8d. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1274.971869] env[62525]: DEBUG oslo_concurrency.lockutils [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] Acquiring lock "refresh_cache-5bffec39-0b09-49a0-a862-560720db45cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.971869] env[62525]: DEBUG oslo_concurrency.lockutils [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] Acquired lock "refresh_cache-5bffec39-0b09-49a0-a862-560720db45cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.971869] env[62525]: DEBUG nova.network.neutron [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Refreshing network info cache for port 4cdbce67-8f63-4bbb-9079-0e1a0038ee8d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1274.999904] env[62525]: DEBUG nova.policy [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6e5adfb79a4911a67c14a7f7b41a17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6486285375a44318c14aee23e914dcf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1275.244872] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.245157] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquired lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.245256] env[62525]: DEBUG nova.network.neutron [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1275.258908] env[62525]: DEBUG nova.network.neutron [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Successfully updated port: 9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1275.279657] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 
tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.301335] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522abc14-4347-11bc-a3e8-69d8140daf85, 'name': SearchDatastore_Task, 'duration_secs': 0.008951} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.301660] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.301923] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5bffec39-0b09-49a0-a862-560720db45cd/5bffec39-0b09-49a0-a862-560720db45cd.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1275.302390] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1729c81b-c25c-4f3e-ad23-bd5f88b179e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.310468] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1275.310468] env[62525]: value = "task-1780684" [ 1275.310468] env[62525]: _type = "Task" [ 1275.310468] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.325843] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780684, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.378459] env[62525]: DEBUG oslo_vmware.api [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780683, 'name': PowerOnVM_Task, 'duration_secs': 0.479445} completed successfully. 
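Each 'Waiting for the task: (returnval){ value = "task-..." }' record followed by "progress is N%" and "completed successfully" is oslo.vmware polling a vCenter task. Below is a hedged sketch of driving one such call, the disk copy from the image cache into the instance directory, through VMwareAPISession.invoke_api() and wait_for_task(). The host, credentials, object lookups and datastore paths are placeholders, and the CopyVirtualDisk_Task keyword arguments are assumptions about the vSphere API rather than values taken from this log; constructing the session requires a reachable vCenter.

```python
from oslo_vmware import api

# Placeholder endpoint and credentials; the session connects at construction.
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Assumed refs: the VirtualDiskManager managed object from the service content.
vdm = session.vim.service_content.virtualDiskManager

# Datastore paths are placeholders shaped like the ones in the records above.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore1] devstack-image-cache_base/<image>/<image>.vmdk',
    destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

# wait_for_task() polls the task, producing "progress is N%" DEBUG lines,
# and returns (or raises) once the task finishes.
session.wait_for_task(task)
```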
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.378659] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1275.379092] env[62525]: INFO nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Took 15.01 seconds to spawn the instance on the hypervisor. [ 1275.379535] env[62525]: DEBUG nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1275.380354] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26052f44-2feb-460e-9f9d-0fc89ef38b59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.404235] env[62525]: DEBUG nova.network.neutron [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Successfully created port: 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1275.440641] env[62525]: DEBUG nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1275.576907] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ffbe9f-7da1-4a68-ae31-60a94728474e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.585566] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2da84f-405d-4e1d-a76d-7a2fc57a1f2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.623963] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f9ba8e-3e8f-4148-b14a-256383a9d169 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.631930] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfec9397-4a88-4f5b-9ad5-dea56034bd35 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.648138] env[62525]: DEBUG nova.compute.provider_tree [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1275.763299] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.763649] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.763649] env[62525]: DEBUG nova.network.neutron [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1275.800784] env[62525]: DEBUG nova.network.neutron [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1275.820918] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780684, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.911628] env[62525]: INFO nova.compute.manager [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Took 19.76 seconds to build instance. [ 1276.007730] env[62525]: DEBUG nova.network.neutron [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updating instance_info_cache with network_info: [{"id": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "address": "fa:16:3e:67:c6:a3", "network": {"id": "5540caf8-99d8-46af-9d9a-9a73ba235686", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-477507757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689156401b324ce3a5021a6079c5b27b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eeddc3f-2e", "ovs_interfaceid": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.151558] env[62525]: DEBUG nova.scheduler.client.report [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1276.248179] env[62525]: DEBUG nova.network.neutron [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Updated VIF entry in instance network info cache for port 4cdbce67-8f63-4bbb-9079-0e1a0038ee8d. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1276.248656] env[62525]: DEBUG nova.network.neutron [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Updating instance_info_cache with network_info: [{"id": "4cdbce67-8f63-4bbb-9079-0e1a0038ee8d", "address": "fa:16:3e:7e:3f:e4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbce67-8f", "ovs_interfaceid": "4cdbce67-8f63-4bbb-9079-0e1a0038ee8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.303714] env[62525]: DEBUG nova.network.neutron [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1276.325963] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780684, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.413300] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b6f84e2d-ec99-4d61-b72c-f853fa278608 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.278s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.459329] env[62525]: DEBUG nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1276.487224] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1276.487659] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1276.491942] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.491942] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1276.491942] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.491942] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1276.491942] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1276.492357] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1276.492357] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1276.492357] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1276.492357] env[62525]: DEBUG nova.virt.hardware [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1276.492357] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be13e24a-5ee5-45db-b281-6a79d8bd8ca3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.506918] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfe96eb-56d3-4d34-9c29-fe439f7116f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.510903] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Releasing lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.511161] env[62525]: DEBUG nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Instance network_info: |[{"id": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "address": "fa:16:3e:67:c6:a3", "network": {"id": "5540caf8-99d8-46af-9d9a-9a73ba235686", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-477507757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689156401b324ce3a5021a6079c5b27b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eeddc3f-2e", "ovs_interfaceid": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1276.511528] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:c6:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1276.520121] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Creating folder: Project (689156401b324ce3a5021a6079c5b27b). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1276.520923] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a70aab1-2327-45fd-880d-de7590fc173c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.534447] env[62525]: DEBUG nova.network.neutron [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.545757] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Created folder: Project (689156401b324ce3a5021a6079c5b27b) in parent group-v369553. [ 1276.546308] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Creating folder: Instances. Parent ref: group-v369560. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1276.547250] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e2122be-6a44-4250-8afc-d48fa18f433a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.556939] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Created folder: Instances in parent group-v369560. [ 1276.557189] env[62525]: DEBUG oslo.service.loopingcall [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1276.557371] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1276.557570] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc419659-4853-4d14-90f1-98f66f2e3888 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.580668] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1276.580668] env[62525]: value = "task-1780687" [ 1276.580668] env[62525]: _type = "Task" [ 1276.580668] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.593213] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780687, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.658989] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.659379] env[62525]: DEBUG nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Start building networks asynchronously for instance. 
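The "compute_resources" lock records (waited 1.384s here, held 2.223s above) show ResourceTracker.instance_claim serializing claims per compute host, so concurrent tempest builds queue behind one another before their "Claim successful on node" INFO lines appear. A minimal sketch of that locking pattern using oslo.concurrency's synchronized decorator; the class and method body are illustrative only, not the real resource tracker.

```python
from oslo_concurrency import lockutils

# Illustrative only: the real ResourceTracker lives in
# nova/compute/resource_tracker.py. This shows just the locking pattern that
# produces the "Acquiring/acquired/released lock 'compute_resources'" records.
class ResourceTracker:
    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance, flavor):
        # Check free VCPU / MEMORY_MB / DISK_GB against the host inventory and
        # record the claim before the build proceeds.
        return True
```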
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1276.663315] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.384s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.665219] env[62525]: INFO nova.compute.claims [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1276.751641] env[62525]: DEBUG oslo_concurrency.lockutils [req-fa429536-3720-4376-844c-0b711f7a7de2 req-2bba3dbc-0bbd-43dc-9f58-238b9db4d3c8 service nova] Releasing lock "refresh_cache-5bffec39-0b09-49a0-a862-560720db45cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.826249] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780684, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.891671] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.891967] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.018068] env[62525]: DEBUG nova.compute.manager [req-267c32ec-2088-4378-bd31-07839dd53445 req-3343133c-4d99-4359-bc38-9a7282f3e3e5 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received event network-vif-plugged-9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1277.018230] env[62525]: DEBUG oslo_concurrency.lockutils [req-267c32ec-2088-4378-bd31-07839dd53445 req-3343133c-4d99-4359-bc38-9a7282f3e3e5 service nova] Acquiring lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.018429] env[62525]: DEBUG oslo_concurrency.lockutils [req-267c32ec-2088-4378-bd31-07839dd53445 req-3343133c-4d99-4359-bc38-9a7282f3e3e5 service nova] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.018588] env[62525]: DEBUG oslo_concurrency.lockutils [req-267c32ec-2088-4378-bd31-07839dd53445 req-3343133c-4d99-4359-bc38-9a7282f3e3e5 service nova] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.018750] env[62525]: DEBUG nova.compute.manager [req-267c32ec-2088-4378-bd31-07839dd53445 req-3343133c-4d99-4359-bc38-9a7282f3e3e5 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] No waiting events found dispatching network-vif-plugged-9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1277.018939] env[62525]: WARNING nova.compute.manager [req-267c32ec-2088-4378-bd31-07839dd53445 req-3343133c-4d99-4359-bc38-9a7282f3e3e5 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received unexpected event network-vif-plugged-9c337d27-bc69-4787-a533-f523faa8aa10 for instance with vm_state building and task_state spawning. [ 1277.037772] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.038102] env[62525]: DEBUG nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Instance network_info: |[{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1277.038594] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:65:6d', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '669e4919-e0ad-4e23-9f23-4c5f2be0d858', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c337d27-bc69-4787-a533-f523faa8aa10', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1277.046036] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Creating folder: Project (b12f7b101b8848f28f2fc65ce3f0076c). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1277.046576] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15a107de-eee9-4337-8c6e-481c7fd1d02a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.057905] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Created folder: Project (b12f7b101b8848f28f2fc65ce3f0076c) in parent group-v369553. [ 1277.058112] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Creating folder: Instances. Parent ref: group-v369563. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1277.058340] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aad67aa1-405f-4a77-a47d-b1aa3b593190 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.067266] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Created folder: Instances in parent group-v369563. [ 1277.067487] env[62525]: DEBUG oslo.service.loopingcall [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.067677] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1277.067879] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f8c66d4-b6e1-4607-8955-474d368b7869 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.092771] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780687, 'name': CreateVM_Task, 'duration_secs': 0.357043} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.093931] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1277.094141] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1277.094141] env[62525]: value = "task-1780690" [ 1277.094141] env[62525]: _type = "Task" [ 1277.094141] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.095159] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.095444] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.095799] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1277.096085] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e433c3b-f2c8-4075-97f7-2b0700f4bd78 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.103474] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1277.103474] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f4c8dc-2c13-bd08-244e-f2c85c268e1f" [ 1277.103474] env[62525]: _type = "Task" [ 1277.103474] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.106490] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780690, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.114480] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f4c8dc-2c13-bd08-244e-f2c85c268e1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.170297] env[62525]: DEBUG nova.compute.utils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1277.173684] env[62525]: DEBUG nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1277.174041] env[62525]: DEBUG nova.network.neutron [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1277.323479] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780684, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.331326] env[62525]: DEBUG nova.policy [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ce008a841cf41389c74dce78bf3cd99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a380a12ff0444c989a3a42dbaf5d579e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1277.366619] env[62525]: DEBUG nova.network.neutron [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Successfully updated port: 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1277.398110] env[62525]: DEBUG nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1277.611898] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780690, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.624460] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f4c8dc-2c13-bd08-244e-f2c85c268e1f, 'name': SearchDatastore_Task, 'duration_secs': 0.256647} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.624559] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.625329] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1277.625329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.625329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.625499] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1277.625801] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36183165-8110-4026-9dcc-1eaf3af64761 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.642931] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1277.643437] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1277.644135] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc289780-f475-4b74-8d11-abe419b8662e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.653247] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1277.653247] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dc3964-0b67-15e0-7756-b09556df0cbf" [ 1277.653247] env[62525]: _type = "Task" [ 1277.653247] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.664806] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dc3964-0b67-15e0-7756-b09556df0cbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.674821] env[62525]: DEBUG nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1277.827646] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780684, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.457102} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.827760] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5bffec39-0b09-49a0-a862-560720db45cd/5bffec39-0b09-49a0-a862-560720db45cd.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1277.827902] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1277.828203] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72e9213c-7bd1-4194-a9d5-3b2209e6d716 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.835660] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1277.835660] env[62525]: value = "task-1780691" [ 1277.835660] env[62525]: _type = "Task" [ 1277.835660] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.847440] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780691, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.870153] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.870903] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.870903] env[62525]: DEBUG nova.network.neutron [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1277.917917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.948062] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad3d83b-8307-4b7e-886a-c96beb262332 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.959364] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad67abc5-c516-4142-b825-2ef97dbbb0eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.004402] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7550c891-5779-412e-828a-292b1f672014 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.012706] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add627ce-0a90-4112-8715-f874cbc8543a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.028917] env[62525]: DEBUG nova.compute.provider_tree [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.119445] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780690, 'name': CreateVM_Task, 'duration_secs': 0.611927} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.119445] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1278.119759] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.119845] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.120163] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1278.120404] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b24bace-fdbe-47bc-8880-1ba87f5736d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.125767] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1278.125767] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52508001-fb03-46fa-60b9-85edd7b36d6e" [ 1278.125767] env[62525]: _type = "Task" [ 1278.125767] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.134854] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52508001-fb03-46fa-60b9-85edd7b36d6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.164018] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dc3964-0b67-15e0-7756-b09556df0cbf, 'name': SearchDatastore_Task, 'duration_secs': 0.064676} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.165124] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7da437b0-a15d-4de9-ac45-6f4a7854f224 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.171065] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1278.171065] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52634123-b9f4-6acf-49ec-a00826d6aca7" [ 1278.171065] env[62525]: _type = "Task" [ 1278.171065] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.181235] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52634123-b9f4-6acf-49ec-a00826d6aca7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.229504] env[62525]: DEBUG nova.compute.manager [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Received event network-vif-plugged-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1278.229746] env[62525]: DEBUG oslo_concurrency.lockutils [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] Acquiring lock "c7603ce8-8471-4813-9faf-3667a205893c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.231364] env[62525]: DEBUG oslo_concurrency.lockutils [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] Lock "c7603ce8-8471-4813-9faf-3667a205893c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.231589] env[62525]: DEBUG oslo_concurrency.lockutils [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] Lock "c7603ce8-8471-4813-9faf-3667a205893c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.232078] env[62525]: DEBUG nova.compute.manager [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] No waiting events found dispatching network-vif-plugged-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1278.232382] env[62525]: WARNING nova.compute.manager [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Received unexpected event 
network-vif-plugged-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b for instance with vm_state building and task_state spawning. [ 1278.232458] env[62525]: DEBUG nova.compute.manager [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Received event network-changed-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1278.232597] env[62525]: DEBUG nova.compute.manager [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Refreshing instance network info cache due to event network-changed-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1278.232793] env[62525]: DEBUG oslo_concurrency.lockutils [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] Acquiring lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.232923] env[62525]: DEBUG oslo_concurrency.lockutils [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] Acquired lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.233615] env[62525]: DEBUG nova.network.neutron [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Refreshing network info cache for port 4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1278.302966] env[62525]: DEBUG nova.network.neutron [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Successfully created port: 53bbc3ac-7df3-4d0a-a947-3866d3c4460b {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1278.347617] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190904} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.348163] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1278.350969] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1912ada0-059e-4a58-9947-18e30e8f03a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.374590] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 5bffec39-0b09-49a0-a862-560720db45cd/5bffec39-0b09-49a0-a862-560720db45cd.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1278.377252] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a64c5e5f-0ebf-4c61-8ed5-1df1c606175c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.399377] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1278.399377] env[62525]: value = "task-1780692" [ 1278.399377] env[62525]: _type = "Task" [ 1278.399377] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.407943] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780692, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.410127] env[62525]: DEBUG nova.compute.manager [None req-9829aefb-21be-43ce-93d8-c3a70c0c67f8 tempest-ServerDiagnosticsTest-1189585027 tempest-ServerDiagnosticsTest-1189585027-project-admin] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1278.411170] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6516fc2e-9dcb-4deb-91ed-588d5aba4dbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.417464] env[62525]: INFO nova.compute.manager [None req-9829aefb-21be-43ce-93d8-c3a70c0c67f8 tempest-ServerDiagnosticsTest-1189585027 tempest-ServerDiagnosticsTest-1189585027-project-admin] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Retrieving diagnostics [ 1278.418413] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcb6770-587a-4a8f-94f4-4ee1b8db5f59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.471796] env[62525]: DEBUG nova.network.neutron [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1278.537944] env[62525]: DEBUG nova.scheduler.client.report [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1278.641729] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52508001-fb03-46fa-60b9-85edd7b36d6e, 'name': SearchDatastore_Task, 'duration_secs': 0.009643} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.642707] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.643110] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1278.643554] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.682465] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52634123-b9f4-6acf-49ec-a00826d6aca7, 'name': SearchDatastore_Task, 'duration_secs': 0.013499} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.682710] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.682976] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c7603ce8-8471-4813-9faf-3667a205893c/c7603ce8-8471-4813-9faf-3667a205893c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1278.683309] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.683522] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1278.683802] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-487001f4-9573-4b8e-8f42-839cbb7c3da0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.686281] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-476282cb-24c9-4c36-bc7a-71206229fb43 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.689023] env[62525]: DEBUG nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1278.697054] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1278.697054] env[62525]: value = "task-1780693" [ 1278.697054] env[62525]: _type = "Task" [ 1278.697054] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.698336] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1278.698504] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1278.702933] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae7d4e8-fb8a-47cf-9e74-d0752fd99be2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.711885] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.712262] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1278.712262] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5218d383-e937-f2df-307b-d57711815565" [ 1278.712262] env[62525]: _type = "Task" [ 1278.712262] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.725531] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5218d383-e937-f2df-307b-d57711815565, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.733184] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1278.733444] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1278.733620] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.733858] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1278.734040] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.734219] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1278.734460] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1278.734625] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1278.734832] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1278.735041] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1278.735264] env[62525]: DEBUG nova.virt.hardware [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1278.736200] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322e1e01-e3eb-4140-8ea5-9e0b8f4b7b3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.745949] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9c9b5a-4593-4b54-abb5-4900cb91d19d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.829737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.829980] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.914954] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780692, 'name': ReconfigVM_Task, 'duration_secs': 0.414871} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.914954] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 5bffec39-0b09-49a0-a862-560720db45cd/5bffec39-0b09-49a0-a862-560720db45cd.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1278.914954] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-285b2fdb-ec3b-44ef-bb17-5ab227387237 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.922596] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1278.922596] env[62525]: value = "task-1780694" [ 1278.922596] env[62525]: _type = "Task" [ 1278.922596] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.934511] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780694, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.936141] env[62525]: DEBUG nova.network.neutron [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updating instance_info_cache with network_info: [{"id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "address": "fa:16:3e:d0:8d:de", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1790239c-c6", "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.042206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.042736] env[62525]: DEBUG nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1279.049620] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.132s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.052550] env[62525]: INFO nova.compute.claims [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1279.210185] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780693, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.223173] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5218d383-e937-f2df-307b-d57711815565, 'name': SearchDatastore_Task, 'duration_secs': 0.025601} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.224350] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38403607-ef40-430d-b5e8-9797480f25a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.229930] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1279.229930] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b4d027-f05c-cf2f-aaaf-50bf2afbef51" [ 1279.229930] env[62525]: _type = "Task" [ 1279.229930] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.238711] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b4d027-f05c-cf2f-aaaf-50bf2afbef51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.332600] env[62525]: DEBUG nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1279.437644] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780694, 'name': Rename_Task, 'duration_secs': 0.428234} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.438135] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Releasing lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.438482] env[62525]: DEBUG nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Instance network_info: |[{"id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "address": "fa:16:3e:d0:8d:de", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1790239c-c6", "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1279.438817] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1279.439697] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:8d:de', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1790239c-c6c1-47bb-ac87-c96e5a2f2e8d', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1279.448304] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Creating folder: Project (c6486285375a44318c14aee23e914dcf). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1279.448632] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f78a9319-3b56-47f7-92aa-17b29749d00e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.450429] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eafd9dfa-a826-48e7-9751-ff79db64e759 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.459222] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1279.459222] env[62525]: value = "task-1780695" [ 1279.459222] env[62525]: _type = "Task" [ 1279.459222] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.468039] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Created folder: Project (c6486285375a44318c14aee23e914dcf) in parent group-v369553. [ 1279.468039] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Creating folder: Instances. Parent ref: group-v369566. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1279.471348] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32bc4270-8fdf-4d6c-9e6c-44c5d0ca783c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.472386] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780695, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.479624] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Created folder: Instances in parent group-v369566. 
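The records around this point all follow the same vSphere task pattern: a task (Rename_Task, CreateFolder, CreateVM_Task, PowerOnVM_Task, ...) is started, the driver logs "Waiting for the task ... to complete", polls its progress, and finally logs the duration once it finishes. Below is a minimal, self-contained sketch of that polling pattern; the `FakeTask` class and `poll_task()` helper are illustrative stand-ins, not the real oslo_vmware.api implementation.

```python
# Sketch of the "wait_for_task" polling pattern visible in the log above.
# Illustration only -- FakeTask and poll_task() are hypothetical stand-ins,
# not the actual oslo_vmware.api code.
import time


class FakeTask:
    """Stand-in for a vSphere task handle such as task-1780695."""

    def __init__(self, task_id, steps=3):
        self.task_id = task_id
        self._steps = steps
        self._polls = 0

    def poll(self):
        """Return (state, progress); completes after a few polls."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)


def poll_task(task, interval=0.5):
    """Poll a task until it finishes, logging progress like the records above."""
    start = time.monotonic()
    print(f"Waiting for the task: {task.task_id} to complete.")
    while True:
        state, progress = task.poll()
        print(f"Task: {task.task_id} progress is {progress}%.")
        if state == "success":
            duration = time.monotonic() - start
            print(f"Task: {task.task_id} completed successfully "
                  f"(duration_secs: {duration:.3f}).")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.task_id} failed")
        time.sleep(interval)


if __name__ == "__main__":
    poll_task(FakeTask("task-1780695"))
```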
[ 1279.479868] env[62525]: DEBUG oslo.service.loopingcall [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1279.480072] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1279.480271] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e63f179-593c-4d13-9526-8c88523635fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.504944] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1279.504944] env[62525]: value = "task-1780698" [ 1279.504944] env[62525]: _type = "Task" [ 1279.504944] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.513942] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780698, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.558172] env[62525]: DEBUG nova.compute.utils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1279.562193] env[62525]: DEBUG nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1279.562367] env[62525]: DEBUG nova.network.neutron [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1279.711080] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780693, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555121} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.711555] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c7603ce8-8471-4813-9faf-3667a205893c/c7603ce8-8471-4813-9faf-3667a205893c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1279.711693] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1279.713254] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6bfeaa04-5b36-405f-af98-2fb7f2f377e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.727073] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1279.727073] env[62525]: value = "task-1780699" [ 1279.727073] env[62525]: _type = "Task" [ 1279.727073] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.742062] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780699, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.746519] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b4d027-f05c-cf2f-aaaf-50bf2afbef51, 'name': SearchDatastore_Task, 'duration_secs': 0.01215} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.747353] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.747353] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3/8adc8b4b-1087-4a11-9ee8-d897f1aa83f3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1279.747353] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e7714a0-8929-4e8d-9353-b74810fded95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.754110] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1279.754110] env[62525]: value = "task-1780700" [ 1279.754110] env[62525]: _type = "Task" [ 1279.754110] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.762651] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780700, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.777107] env[62525]: DEBUG nova.policy [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7118afceba3b4847a21527897bcf38b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd45bea689ee84b988655da8fff7c4546', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1279.785146] env[62525]: DEBUG nova.network.neutron [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updated VIF entry in instance network info cache for port 4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1279.785739] env[62525]: DEBUG nova.network.neutron [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updating instance_info_cache with network_info: [{"id": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "address": "fa:16:3e:67:c6:a3", "network": {"id": "5540caf8-99d8-46af-9d9a-9a73ba235686", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-477507757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689156401b324ce3a5021a6079c5b27b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eeddc3f-2e", "ovs_interfaceid": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.870096] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.972473] env[62525]: DEBUG oslo_vmware.api [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780695, 'name': PowerOnVM_Task, 'duration_secs': 0.490345} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.973510] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.973827] env[62525]: INFO nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Took 11.01 seconds to spawn the instance on the hypervisor. 
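The instance_info_cache entries above carry the full Neutron network_info as a JSON-like list. The sketch below pulls out the handful of fields the subsequent log records actually use (port id, MAC, bridge, fixed IPs, MTU, device name); the sample data mirrors port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d from the cache update above, and the `summarize_vif()` helper is purely illustrative, not Nova code.

```python
# Sketch: extracting the fields used when plugging a port from a cached
# network_info entry like the ones logged above. summarize_vif() is an
# illustrative helper; the literal below is trimmed from the logged entry.
network_info = [{
    "id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d",
    "address": "fa:16:3e:d0:8d:de",
    "network": {
        "id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.8", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950, "physical_network": "default"},
    },
    "type": "ovs",
    "devname": "tap1790239c-c6",
    "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d",
    "active": True,
    "vnic_type": "normal",
}]


def summarize_vif(vif):
    """Flatten one VIF entry into the fields needed to wire up the port."""
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "bridge": vif["network"]["bridge"],
        "fixed_ips": fixed_ips,
        "mtu": vif["network"]["meta"]["mtu"],
        "devname": vif["devname"],
    }


if __name__ == "__main__":
    for vif in network_info:
        print(summarize_vif(vif))
```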
[ 1279.974285] env[62525]: DEBUG nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1279.975252] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e1aabd-1f9b-49c9-b143-91811051c1d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.018319] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780698, 'name': CreateVM_Task, 'duration_secs': 0.482761} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.019256] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1280.019945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.020152] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.020675] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1280.020859] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acfa71e7-7c2a-4f8f-850e-37669dadfd80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.026213] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1280.026213] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5225dd7b-34c2-6457-07f6-54670ff3c9cc" [ 1280.026213] env[62525]: _type = "Task" [ 1280.026213] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.034608] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5225dd7b-34c2-6457-07f6-54670ff3c9cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.066963] env[62525]: DEBUG nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1280.239515] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780699, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06935} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.243406] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1280.244327] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90de619-b688-4199-8afb-deb2f9915674 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.278938] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] c7603ce8-8471-4813-9faf-3667a205893c/c7603ce8-8471-4813-9faf-3667a205893c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1280.291022] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ebcd2e1-fec5-4cf5-82d4-37bc234edc6d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.315481] env[62525]: DEBUG oslo_concurrency.lockutils [req-e4a5706e-cf0a-4281-b4f5-f48c0cc41c42 req-1cfbca08-a1a5-438d-9a77-67f15004601b service nova] Releasing lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.325135] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780700, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.327144] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1280.327144] env[62525]: value = "task-1780701" [ 1280.327144] env[62525]: _type = "Task" [ 1280.327144] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.339047] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780701, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.341381] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863545be-9766-481a-b76d-8d4b9aab2233 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.354614] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537e055f-5c9b-4f78-99e1-c619909818c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.398599] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a25c01d-f846-4ae0-95c9-48723ea0af0c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.408016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eaab426-123e-44e4-a190-1de78a098cf6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.424526] env[62525]: DEBUG nova.compute.provider_tree [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.485107] env[62525]: DEBUG nova.compute.manager [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received event network-changed-9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1280.485389] env[62525]: DEBUG nova.compute.manager [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Refreshing instance network info cache due to event network-changed-9c337d27-bc69-4787-a533-f523faa8aa10. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1280.485564] env[62525]: DEBUG oslo_concurrency.lockutils [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] Acquiring lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.485707] env[62525]: DEBUG oslo_concurrency.lockutils [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] Acquired lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.485869] env[62525]: DEBUG nova.network.neutron [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Refreshing network info cache for port 9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1280.503159] env[62525]: INFO nova.compute.manager [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Took 15.76 seconds to build instance. [ 1280.541828] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5225dd7b-34c2-6457-07f6-54670ff3c9cc, 'name': SearchDatastore_Task, 'duration_secs': 0.028874} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.543768] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.543768] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1280.543768] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.543768] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.543972] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1280.543972] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afe1010d-8cb1-44da-a377-dd8b56c636f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.552590] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1280.552765] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1280.553633] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-652a1684-e36a-414e-8105-190b327c0c22 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.566409] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1280.566409] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5275751d-7a5d-651c-6029-8c2f5493d6a3" [ 1280.566409] env[62525]: _type = "Task" [ 1280.566409] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.580543] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5275751d-7a5d-651c-6029-8c2f5493d6a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.780491] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726133} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.782310] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3/8adc8b4b-1087-4a11-9ee8-d897f1aa83f3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1280.783091] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1280.783091] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d6e43ae-0b13-4d6b-a60d-c6229d987e11 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.794139] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1280.794139] env[62525]: value = "task-1780702" [ 1280.794139] env[62525]: _type = "Task" [ 1280.794139] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.807441] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780702, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.842864] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780701, 'name': ReconfigVM_Task, 'duration_secs': 0.320329} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.842915] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Reconfigured VM instance instance-00000004 to attach disk [datastore1] c7603ce8-8471-4813-9faf-3667a205893c/c7603ce8-8471-4813-9faf-3667a205893c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1280.843541] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-594bc12d-595f-4475-87b9-efdeb870d87c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.853234] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1280.853234] env[62525]: value = "task-1780703" [ 1280.853234] env[62525]: _type = "Task" [ 1280.853234] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.865475] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780703, 'name': Rename_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.909351] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.910068] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.930841] env[62525]: DEBUG nova.scheduler.client.report [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1281.005271] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fc52f8ee-36ea-481c-a97d-7ebde6924d8c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "5bffec39-0b09-49a0-a862-560720db45cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.273s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.079189] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5275751d-7a5d-651c-6029-8c2f5493d6a3, 'name': SearchDatastore_Task, 'duration_secs': 0.016069} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.079991] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e7e935-9654-4acb-855f-71dcf95703c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.084670] env[62525]: DEBUG nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1281.090974] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1281.090974] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e1c045-433f-cea1-209b-c1b1ea1df5aa" [ 1281.090974] env[62525]: _type = "Task" [ 1281.090974] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.099697] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "5c9ca73a-bc48-4a75-89c8-03def719e488" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.099854] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.100048] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "5c9ca73a-bc48-4a75-89c8-03def719e488-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.100231] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.101747] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.102747] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e1c045-433f-cea1-209b-c1b1ea1df5aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.102747] env[62525]: INFO nova.compute.manager [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Terminating instance [ 1281.105275] env[62525]: DEBUG nova.compute.manager [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1281.106771] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1281.107008] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddcb386-ba52-4162-84d9-e4c482b3f146 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.117214] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1281.117469] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2439c442-e469-446b-83fc-91b141d2d174 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.124685] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1281.124685] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1281.124685] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 
tempest-ServersTestFqdnHostnames-1171532093-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1281.125156] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1281.125156] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1281.125156] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1281.125156] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1281.125156] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1281.125345] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1281.125345] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1281.125345] env[62525]: DEBUG nova.virt.hardware [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1281.126193] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32cec1b1-c48b-466d-8c88-12ec54485eda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.135576] env[62525]: DEBUG oslo_vmware.api [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1281.135576] env[62525]: value = "task-1780704" [ 1281.135576] env[62525]: _type = "Task" [ 1281.135576] 
env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.143754] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a10d09-8578-4422-82ed-d6e274747f25 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.151305] env[62525]: DEBUG oslo_vmware.api [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780704, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.196551] env[62525]: DEBUG nova.network.neutron [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Successfully created port: bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1281.230718] env[62525]: DEBUG nova.network.neutron [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Successfully updated port: 53bbc3ac-7df3-4d0a-a947-3866d3c4460b {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1281.305700] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070607} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.305700] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1281.306645] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06e08fb-2e8e-4d2b-90e8-d375cebe50d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.334293] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3/8adc8b4b-1087-4a11-9ee8-d897f1aa83f3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1281.334639] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c5fae44-72ed-4770-bdac-4edf4b922697 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.355080] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1281.355080] env[62525]: value = "task-1780705" [ 1281.355080] env[62525]: _type = "Task" [ 1281.355080] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.366900] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780703, 'name': Rename_Task, 'duration_secs': 0.156628} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.370179] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1281.370436] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780705, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.370666] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bc1b308-e1f3-4329-85f6-1801313fa7ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.376251] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1281.376251] env[62525]: value = "task-1780706" [ 1281.376251] env[62525]: _type = "Task" [ 1281.376251] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.384794] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780706, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.412760] env[62525]: DEBUG nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1281.438326] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.388s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.438733] env[62525]: DEBUG nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1281.441335] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.572s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.442852] env[62525]: INFO nova.compute.claims [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1281.449692] env[62525]: DEBUG nova.compute.manager [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received event network-vif-plugged-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1281.449692] env[62525]: DEBUG oslo_concurrency.lockutils [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] Acquiring lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.449855] env[62525]: DEBUG oslo_concurrency.lockutils [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.449992] env[62525]: DEBUG oslo_concurrency.lockutils [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.450163] env[62525]: DEBUG nova.compute.manager [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] No waiting events found dispatching network-vif-plugged-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1281.450321] env[62525]: WARNING nova.compute.manager [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received unexpected event network-vif-plugged-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d for instance with vm_state building and task_state spawning. 
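[editor's note] The task entries interleaved above (task-1780702 through task-1780706: ExtendVirtualDisk, PowerOffVM, Rename, ReconfigVM, PowerOnVM) all follow the same shape that repeats through the rest of this trace: oslo.vmware's wait_for_task logs the returned task handle ("Waiting for the task: (returnval){ value = "task-NNNNNNN" ... } to complete"), _poll_task then logs periodic progress percentages, and the final entry reports duration_secs on success. As a rough illustration only, the stand-alone Python sketch below mimics that poll-and-wait loop; FakeTaskBackend and get_task_info are hypothetical stand-ins for this sketch, not oslo.vmware or vSphere APIs.

import time

class FakeTaskBackend:
    """Stand-in that reports increasing progress, then success.

    A real backend would query the hypervisor for TaskInfo; this fake only
    exists so the sketch is runnable on its own.
    """

    def __init__(self, steps=3):
        self._progress = 0
        self._step = 100 // steps

    def get_task_info(self, task_id):
        # Advance the fake task a little on every poll.
        self._progress = min(100, self._progress + self._step)
        state = 'success' if self._progress >= 100 else 'running'
        return {'state': state, 'progress': self._progress}


def wait_for_task(backend, task_id, poll_interval=0.5):
    """Block until the task finishes, logging progress like the trace above."""
    print(f"Waiting for the task: {task_id} to complete.")
    start = time.monotonic()
    while True:
        info = backend.get_task_info(task_id)
        if info['state'] == 'success':
            duration = time.monotonic() - start
            print(f"Task {task_id} completed successfully in {duration:.3f}s")
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {info['progress']}%.")
        time.sleep(poll_interval)


if __name__ == '__main__':
    wait_for_task(FakeTaskBackend(), 'task-1780705', poll_interval=0.1)

In the real service the polling interval and retry behaviour are driven by the VMware session configuration rather than a hard-coded sleep; the sketch only shows the control flow that produces the "progress is N%" and "completed successfully" lines seen throughout this log.
[end editor's note]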
[ 1281.450469] env[62525]: DEBUG nova.compute.manager [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1281.450618] env[62525]: DEBUG nova.compute.manager [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing instance network info cache due to event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1281.450796] env[62525]: DEBUG oslo_concurrency.lockutils [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] Acquiring lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.451384] env[62525]: DEBUG oslo_concurrency.lockutils [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] Acquired lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.451519] env[62525]: DEBUG nova.network.neutron [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1281.606905] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e1c045-433f-cea1-209b-c1b1ea1df5aa, 'name': SearchDatastore_Task, 'duration_secs': 0.027218} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.606905] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.606905] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07/5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1281.606905] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5c59863-85ce-4ac2-bfd9-1720a70f199f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.614826] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1281.614826] env[62525]: value = "task-1780707" [ 1281.614826] env[62525]: _type = "Task" [ 1281.614826] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.624522] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780707, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.647983] env[62525]: DEBUG oslo_vmware.api [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780704, 'name': PowerOffVM_Task, 'duration_secs': 0.233847} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.648456] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1281.648902] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1281.648966] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41f5cf17-174c-4ab3-acd8-49dd3270a592 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.718774] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1281.719012] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1281.719209] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Deleting the datastore file [datastore1] 5c9ca73a-bc48-4a75-89c8-03def719e488 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1281.719480] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4258cd7-fd61-4836-9c8e-b9b5b87e5c54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.726844] env[62525]: DEBUG oslo_vmware.api [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for the task: (returnval){ [ 1281.726844] env[62525]: value = "task-1780709" [ 1281.726844] env[62525]: _type = "Task" [ 1281.726844] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.736020] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "refresh_cache-9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.736179] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired lock "refresh_cache-9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.736476] env[62525]: DEBUG nova.network.neutron [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1281.737506] env[62525]: DEBUG oslo_vmware.api [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780709, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.872659] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780705, 'name': ReconfigVM_Task, 'duration_secs': 0.287228} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.873264] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3/8adc8b4b-1087-4a11-9ee8-d897f1aa83f3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1281.874190] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a6f13ba-7d87-497c-8c49-3fb4c6dc5d85 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.894739] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1281.894739] env[62525]: value = "task-1780710" [ 1281.894739] env[62525]: _type = "Task" [ 1281.894739] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.895100] env[62525]: DEBUG oslo_vmware.api [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1780706, 'name': PowerOnVM_Task, 'duration_secs': 0.484792} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.895541] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1281.895810] env[62525]: INFO nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Took 7.67 seconds to spawn the instance on the hypervisor. [ 1281.896086] env[62525]: DEBUG nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1281.903198] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a4e28b-5e2d-4625-841a-5c94e869073b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.920485] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780710, 'name': Rename_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.949047] env[62525]: DEBUG nova.compute.utils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1281.954537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.954923] env[62525]: DEBUG nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1281.955123] env[62525]: DEBUG nova.network.neutron [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1282.125051] env[62525]: DEBUG nova.network.neutron [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updated VIF entry in instance network info cache for port 9c337d27-bc69-4787-a533-f523faa8aa10. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1282.125349] env[62525]: DEBUG nova.network.neutron [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.132312] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780707, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.154505] env[62525]: DEBUG nova.policy [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b782b3a022274487a8905215c8c22199', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3e97c55cacc4c2eb618db46fae9f5eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1282.242451] env[62525]: DEBUG oslo_vmware.api [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780709, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.388333] env[62525]: DEBUG nova.network.neutron [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1282.405728] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780710, 'name': Rename_Task, 'duration_secs': 0.235818} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.406068] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1282.406337] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0008ce1-a246-4014-a0dd-9ee90aeea86a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.412599] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1282.412599] env[62525]: value = "task-1780711" [ 1282.412599] env[62525]: _type = "Task" [ 1282.412599] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.420610] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780711, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.442152] env[62525]: INFO nova.compute.manager [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Took 12.52 seconds to build instance. [ 1282.454178] env[62525]: DEBUG nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1282.631989] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780707, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615085} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.632789] env[62525]: DEBUG oslo_concurrency.lockutils [req-84fb4cb8-9de9-4c7f-b6d8-08a98a1e82d4 req-1033b105-f3eb-4e67-a812-b64a5284bc54 service nova] Releasing lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.633334] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07/5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1282.633770] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1282.634270] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8cc35e75-b41d-4022-b994-c1be8a0c347f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.645364] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1282.645364] env[62525]: value = "task-1780712" [ 1282.645364] env[62525]: _type = "Task" [ 1282.645364] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.648573] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0555f547-a76b-46ce-b07d-e4cb8aa27fbe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.664281] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0502a27a-c97d-4078-9090-c798ea95ce70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.669357] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780712, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.709022] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28f609d-e316-44f9-9e26-fa2c59fc6044 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.717832] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8129a4f-33ca-4ccc-a20b-c117c705d45d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.737983] env[62525]: DEBUG nova.compute.provider_tree [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.746771] env[62525]: DEBUG oslo_vmware.api [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Task: {'id': task-1780709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.565843} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.746915] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1282.747112] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1282.747280] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1282.747442] env[62525]: INFO nova.compute.manager [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1282.747702] env[62525]: DEBUG oslo.service.loopingcall [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1282.748580] env[62525]: DEBUG nova.compute.manager [-] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1282.748671] env[62525]: DEBUG nova.network.neutron [-] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1282.923320] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780711, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.933721] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "82ea280a-4e1b-4fac-a634-7f79ce731564" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.934224] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.944109] env[62525]: DEBUG oslo_concurrency.lockutils [None req-727f9233-9b55-4c77-a289-767471057184 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "c7603ce8-8471-4813-9faf-3667a205893c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.048s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.163231] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780712, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073081} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.163515] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1283.164642] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64854a2a-aaa4-45bd-bdaf-8f6ff136033b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.193130] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07/5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1283.193647] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3434e1f5-408f-460b-b492-e9376cc502cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.216388] env[62525]: DEBUG nova.network.neutron [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Updating instance_info_cache with network_info: [{"id": "53bbc3ac-7df3-4d0a-a947-3866d3c4460b", "address": "fa:16:3e:9e:40:a0", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53bbc3ac-7d", "ovs_interfaceid": "53bbc3ac-7df3-4d0a-a947-3866d3c4460b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.222238] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1283.222238] env[62525]: value = "task-1780713" [ 1283.222238] env[62525]: _type = "Task" [ 1283.222238] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.224694] env[62525]: DEBUG nova.network.neutron [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updated VIF entry in instance network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.225132] env[62525]: DEBUG nova.network.neutron [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updating instance_info_cache with network_info: [{"id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "address": "fa:16:3e:d0:8d:de", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1790239c-c6", "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.230319] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780713, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.242707] env[62525]: DEBUG nova.scheduler.client.report [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.423091] env[62525]: DEBUG oslo_vmware.api [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1780711, 'name': PowerOnVM_Task, 'duration_secs': 0.613634} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.423377] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1283.423592] env[62525]: INFO nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Took 11.48 seconds to spawn the instance on the hypervisor. [ 1283.423954] env[62525]: DEBUG nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1283.424814] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2445a416-4b2a-4817-bcf0-1debf89eb468 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.437994] env[62525]: DEBUG nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1283.466637] env[62525]: DEBUG nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1283.504395] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1283.504643] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1283.504792] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1283.504962] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1283.505128] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1283.505280] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1283.505541] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1283.505610] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1283.505842] env[62525]: DEBUG 
nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1283.505957] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1283.506151] env[62525]: DEBUG nova.virt.hardware [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1283.507307] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce53b931-447a-48ef-a7b5-06cd597bef87 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.516860] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d45255-7011-4190-affb-7640f05cea1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.650324] env[62525]: DEBUG nova.network.neutron [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Successfully created port: 40988ca1-f187-490a-9770-d08a56e6b866 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1283.719496] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Releasing lock "refresh_cache-9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.719797] env[62525]: DEBUG nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Instance network_info: |[{"id": "53bbc3ac-7df3-4d0a-a947-3866d3c4460b", "address": "fa:16:3e:9e:40:a0", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53bbc3ac-7d", 
"ovs_interfaceid": "53bbc3ac-7df3-4d0a-a947-3866d3c4460b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1283.720329] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:40:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53bbc3ac-7df3-4d0a-a947-3866d3c4460b', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1283.735950] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Creating folder: Project (a380a12ff0444c989a3a42dbaf5d579e). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1283.740990] env[62525]: DEBUG oslo_concurrency.lockutils [req-c923d550-6368-4423-9cee-ea01655fc9f9 req-d30c7d92-6a4f-484c-94de-4f735a641531 service nova] Releasing lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.741518] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f26d9a4-3b40-45cf-907b-02122680f0cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.751702] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.310s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.752370] env[62525]: DEBUG nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1283.759546] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780713, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.762203] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.808s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.764438] env[62525]: INFO nova.compute.claims [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1283.770146] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Created folder: Project (a380a12ff0444c989a3a42dbaf5d579e) in parent group-v369553. [ 1283.770146] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Creating folder: Instances. Parent ref: group-v369569. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1283.770146] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aeff4769-36b8-48c4-8388-9ec647db104d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.779408] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Created folder: Instances in parent group-v369569. [ 1283.779983] env[62525]: DEBUG oslo.service.loopingcall [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1283.780336] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1283.780688] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7795388-f5e5-49db-b102-533b28b4ada0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.808361] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1283.808361] env[62525]: value = "task-1780716" [ 1283.808361] env[62525]: _type = "Task" [ 1283.808361] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.817593] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780716, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.970735] env[62525]: INFO nova.compute.manager [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Took 16.39 seconds to build instance. [ 1283.978324] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.231811] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780713, 'name': ReconfigVM_Task, 'duration_secs': 0.684113} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.233901] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07/5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1284.234757] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a5e0e2b-52fe-440a-a4eb-664abbe126f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.242254] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1284.242254] env[62525]: value = "task-1780717" [ 1284.242254] env[62525]: _type = "Task" [ 1284.242254] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.253748] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780717, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.261609] env[62525]: DEBUG nova.compute.utils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1284.262456] env[62525]: DEBUG nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1284.262456] env[62525]: DEBUG nova.network.neutron [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1284.323780] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780716, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.328415] env[62525]: DEBUG nova.network.neutron [-] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.358373] env[62525]: DEBUG nova.policy [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b254a8715dfc48a7bab5ad253e09507c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '557d744dc35943aab165225698db81bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1284.450624] env[62525]: DEBUG nova.compute.manager [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Received event network-vif-plugged-53bbc3ac-7df3-4d0a-a947-3866d3c4460b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1284.451392] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] Acquiring lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.451559] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.451736] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.451906] env[62525]: DEBUG nova.compute.manager [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] No waiting events found dispatching network-vif-plugged-53bbc3ac-7df3-4d0a-a947-3866d3c4460b {{(pid=62525) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1284.452179] env[62525]: WARNING nova.compute.manager [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Received unexpected event network-vif-plugged-53bbc3ac-7df3-4d0a-a947-3866d3c4460b for instance with vm_state building and task_state spawning. [ 1284.452608] env[62525]: DEBUG nova.compute.manager [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Received event network-changed-53bbc3ac-7df3-4d0a-a947-3866d3c4460b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1284.453073] env[62525]: DEBUG nova.compute.manager [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Refreshing instance network info cache due to event network-changed-53bbc3ac-7df3-4d0a-a947-3866d3c4460b. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1284.453288] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] Acquiring lock "refresh_cache-9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1284.453574] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] Acquired lock "refresh_cache-9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.453818] env[62525]: DEBUG nova.network.neutron [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Refreshing network info cache for port 53bbc3ac-7df3-4d0a-a947-3866d3c4460b {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1284.473627] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f36edeec-7eea-4e7a-a3f2-54ce9f47c331 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.898s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.626669] env[62525]: DEBUG nova.network.neutron [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Successfully updated port: bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1284.760634] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780717, 'name': Rename_Task, 'duration_secs': 0.206334} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.761035] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1284.761324] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d42bb9d-6548-4df0-a363-3a14777a176e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.767362] env[62525]: DEBUG nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1284.776028] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1284.776028] env[62525]: value = "task-1780718" [ 1284.776028] env[62525]: _type = "Task" [ 1284.776028] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.797601] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780718, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.823630] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780716, 'name': CreateVM_Task, 'duration_secs': 0.620447} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.826480] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1284.828631] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1284.829019] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.829517] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1284.830132] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ecb9675-cd08-4e7e-b5ab-5d7a93fea4d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.835808] env[62525]: INFO nova.compute.manager [-] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Took 2.09 seconds to deallocate network for instance. [ 1284.836182] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1284.836182] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521a140f-c82e-2a8f-67b3-83029df6a6b9" [ 1284.836182] env[62525]: _type = "Task" [ 1284.836182] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.856038] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a140f-c82e-2a8f-67b3-83029df6a6b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.922021] env[62525]: DEBUG nova.network.neutron [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Successfully created port: ce676e95-1fd5-4abf-9228-aa35cc8606e6 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1284.997942] env[62525]: DEBUG nova.compute.manager [req-2d74ab3a-a2b1-41a3-95a9-4a12e3693e3e req-e93201cf-813c-4a44-9648-02e5e94b1805 service nova] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Received event network-vif-deleted-b5fcb1bb-8ada-40c9-8436-fb190e5aef33 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1285.008081] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df26d7c0-3713-4f45-986f-e18e4592a89f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.015516] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e164e6e7-0d29-4d08-a963-7d4f7a3e398a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.051049] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a9dffa-316c-4255-81a7-055a79b8ba1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.060199] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2888a66d-5992-47d1-a5be-026243701661 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.078175] env[62525]: DEBUG nova.compute.provider_tree [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.131598] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.132457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquired lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.132457] env[62525]: DEBUG nova.network.neutron [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1285.299691] env[62525]: DEBUG oslo_vmware.api [None 
req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780718, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.352365] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.352692] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a140f-c82e-2a8f-67b3-83029df6a6b9, 'name': SearchDatastore_Task, 'duration_secs': 0.039493} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.352961] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.353222] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1285.356605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.356605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.356605] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1285.356605] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29bc1529-1acb-4ee1-aa4c-33a744ff7d01 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.384093] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1285.384299] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1285.385068] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a1ce611-c963-4952-b4a7-9fbfc791cef4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.393576] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1285.393576] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c1f916-9071-498f-31bd-c277149c546a" [ 1285.393576] env[62525]: _type = "Task" [ 1285.393576] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.402594] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c1f916-9071-498f-31bd-c277149c546a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.432668] env[62525]: DEBUG nova.network.neutron [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Updated VIF entry in instance network info cache for port 53bbc3ac-7df3-4d0a-a947-3866d3c4460b. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1285.432828] env[62525]: DEBUG nova.network.neutron [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Updating instance_info_cache with network_info: [{"id": "53bbc3ac-7df3-4d0a-a947-3866d3c4460b", "address": "fa:16:3e:9e:40:a0", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53bbc3ac-7d", "ovs_interfaceid": "53bbc3ac-7df3-4d0a-a947-3866d3c4460b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.580540] env[62525]: DEBUG nova.scheduler.client.report [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1285.687889] env[62525]: DEBUG nova.network.neutron [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1285.788516] env[62525]: DEBUG nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1285.802607] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780718, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.820371] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1285.820371] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1285.820742] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1285.820998] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1285.821448] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1285.821448] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1285.821625] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1285.821936] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1285.822042] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1285.822216] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1285.822548] env[62525]: DEBUG nova.virt.hardware [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1285.824058] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857f26c0-b468-4f10-931e-5936c0164508 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.836091] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e88d15-9a26-484c-8316-c0863276ca13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.907207] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c1f916-9071-498f-31bd-c277149c546a, 'name': SearchDatastore_Task, 'duration_secs': 0.026722} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.907484] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eafddb6f-df97-4987-a025-cb9d3635de92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.915027] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1285.915027] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522c6586-35ee-fe49-4a9b-3e0b2e8d12a9" [ 1285.915027] env[62525]: _type = "Task" [ 1285.915027] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.926601] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522c6586-35ee-fe49-4a9b-3e0b2e8d12a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.936628] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b6f2182-1a6a-4d43-8694-57c4da1986af req-7561d540-b459-4063-a8db-d92705174c23 service nova] Releasing lock "refresh_cache-9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.072373] env[62525]: DEBUG nova.network.neutron [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Updating instance_info_cache with network_info: [{"id": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "address": "fa:16:3e:73:35:0d", "network": {"id": "6d708e5d-ee5e-4669-a392-953aae2a8af9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1717084220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d45bea689ee84b988655da8fff7c4546", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb0ccd32-fa", "ovs_interfaceid": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.088657] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.326s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.089557] env[62525]: DEBUG nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1286.094115] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.114s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.096929] env[62525]: INFO nova.compute.claims [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1286.312906] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780718, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.336817] env[62525]: DEBUG nova.network.neutron [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Successfully updated port: 40988ca1-f187-490a-9770-d08a56e6b866 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1286.431465] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522c6586-35ee-fe49-4a9b-3e0b2e8d12a9, 'name': SearchDatastore_Task, 'duration_secs': 0.014589} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.431465] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.434185] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6/9a7bfafe-8598-4c6f-9714-0567fcbb8ea6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1286.434185] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9203604-c9c7-40ae-ac2e-f09da8af0781 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.441615] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1286.441615] env[62525]: value = "task-1780719" [ 1286.441615] env[62525]: _type = "Task" [ 1286.441615] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.453455] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780719, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.579765] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Releasing lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.580213] env[62525]: DEBUG nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Instance network_info: |[{"id": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "address": "fa:16:3e:73:35:0d", "network": {"id": "6d708e5d-ee5e-4669-a392-953aae2a8af9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1717084220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d45bea689ee84b988655da8fff7c4546", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb0ccd32-fa", "ovs_interfaceid": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1286.580851] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:35:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb0ccd32-fa3c-4e68-98dc-c81a3f541a88', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1286.589755] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Creating folder: Project (d45bea689ee84b988655da8fff7c4546). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1286.590534] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a153d5e-2842-4c2b-9afb-b5563473c11f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.601348] env[62525]: DEBUG nova.compute.utils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1286.606738] env[62525]: DEBUG nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1286.606940] env[62525]: DEBUG nova.network.neutron [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1286.609478] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Created folder: Project (d45bea689ee84b988655da8fff7c4546) in parent group-v369553. [ 1286.609691] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Creating folder: Instances. Parent ref: group-v369572. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1286.610174] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-256c1a23-c41c-409b-baf4-600a9cd9f976 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.622130] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Created folder: Instances in parent group-v369572. [ 1286.622407] env[62525]: DEBUG oslo.service.loopingcall [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1286.622608] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1286.623446] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f080df8a-f5ee-4bef-843b-5101aad2021c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.645447] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1286.645447] env[62525]: value = "task-1780722" [ 1286.645447] env[62525]: _type = "Task" [ 1286.645447] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.655488] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780722, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.787691] env[62525]: DEBUG nova.policy [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b254a8715dfc48a7bab5ad253e09507c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '557d744dc35943aab165225698db81bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1286.801731] env[62525]: DEBUG oslo_vmware.api [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780718, 'name': PowerOnVM_Task, 'duration_secs': 1.560804} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.802061] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1286.802264] env[62525]: INFO nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Took 10.34 seconds to spawn the instance on the hypervisor. 
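The PowerOnVM_Task / CreateVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver submits a vSphere task through the shared VMwareAPISession and then blocks in wait_for_task, which is what emits the periodic `_poll_task ... progress is N%` lines. Below is a minimal sketch of that pattern only; the host, credentials, poll interval and the VM lookup are placeholders (not values from this run), and the exact VMwareAPISession constructor arguments may vary between oslo.vmware versions.

```python
# Minimal sketch of the oslo.vmware invoke-then-poll pattern traced above.
# Host, credentials and the VM lookup are placeholders, not from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',   # vCenter host (placeholder)
    'administrator',         # username (placeholder)
    'secret',                # password (placeholder)
    10,                      # api_retry_count
    0.5)                     # task_poll_interval: seconds between polls

# Pick an arbitrary VM reference just to have something to act on.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 1)
vm_ref = result.objects[0].obj

# Submit the vSphere task and block until it finishes; wait_for_task is the
# call that logs "Task: {...} progress is N%" while polling, and it raises
# if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)
```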
[ 1286.802461] env[62525]: DEBUG nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1286.804506] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5c31a1-4914-4f64-92bc-4163d6907583 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.842854] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "refresh_cache-8c6e22d6-353f-4be5-8400-7fe38a9bee25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.842854] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquired lock "refresh_cache-8c6e22d6-353f-4be5-8400-7fe38a9bee25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.842854] env[62525]: DEBUG nova.network.neutron [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.954714] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780719, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.992409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.992591] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.106483] env[62525]: DEBUG nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1287.157220] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780722, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.328496] env[62525]: INFO nova.compute.manager [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Took 15.51 seconds to build instance. [ 1287.331949] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0f8b44-43cf-432f-af28-c99ce356b1fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.343679] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573e8088-0fb2-4cc0-9687-b7932a1dabd5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.387037] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15aca64c-a0be-47dc-b1e0-8aca5f70a9ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.396119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "5bffec39-0b09-49a0-a862-560720db45cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.396119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "5bffec39-0b09-49a0-a862-560720db45cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.396119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "5bffec39-0b09-49a0-a862-560720db45cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.396119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "5bffec39-0b09-49a0-a862-560720db45cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.396466] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock 
"5bffec39-0b09-49a0-a862-560720db45cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.397382] env[62525]: INFO nova.compute.manager [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Terminating instance [ 1287.401121] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f00570-8076-4c15-8ad0-52241c9d478a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.404412] env[62525]: DEBUG nova.compute.manager [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1287.404609] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1287.405452] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b506d625-53d7-4964-96a8-17a2ddbb9bd2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.418979] env[62525]: DEBUG nova.compute.provider_tree [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1287.422556] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1287.423301] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c3b1ab5-e7a1-4221-83b3-bb5d0da6450b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.429426] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1287.429426] env[62525]: value = "task-1780723" [ 1287.429426] 
env[62525]: _type = "Task" [ 1287.429426] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.439095] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780723, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.453088] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780719, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.853961} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.453088] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6/9a7bfafe-8598-4c6f-9714-0567fcbb8ea6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1287.453088] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1287.453088] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-301b664a-4718-4d9f-ac42-2e55f8a8f488 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.458854] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1287.458854] env[62525]: value = "task-1780724" [ 1287.458854] env[62525]: _type = "Task" [ 1287.458854] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.467180] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780724, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.497891] env[62525]: DEBUG nova.network.neutron [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1287.500525] env[62525]: DEBUG nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1287.661718] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780722, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.838953] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e42b30d3-62d3-4da3-98d0-91edefd79b72 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.027s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.943758] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780723, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.959319] env[62525]: ERROR nova.scheduler.client.report [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [req-fbf12bcb-cf0a-4d82-98de-aeff6f79b166] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fbf12bcb-cf0a-4d82-98de-aeff6f79b166"}]} [ 1287.973496] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780724, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.975551] env[62525]: DEBUG nova.network.neutron [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Successfully updated port: ce676e95-1fd5-4abf-9228-aa35cc8606e6 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1287.998036] env[62525]: DEBUG nova.scheduler.client.report [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1288.029450] env[62525]: DEBUG nova.scheduler.client.report [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1288.030310] env[62525]: DEBUG nova.compute.provider_tree [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1288.033755] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.046265] env[62525]: DEBUG nova.scheduler.client.report [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1288.059028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "aa639aa3-d21c-4923-bc39-56e648c566fb" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.059028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.071608] env[62525]: DEBUG nova.scheduler.client.report [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1288.123129] env[62525]: DEBUG nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1288.149328] env[62525]: DEBUG nova.network.neutron [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Successfully created port: 42ec407b-c27a-4d4f-9c35-6c5a65f5db02 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1288.163170] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1288.163371] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1288.163521] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf 
tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1288.163692] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1288.163828] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1288.163964] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1288.164179] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1288.164368] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1288.164477] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1288.165233] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1288.165233] env[62525]: DEBUG nova.virt.hardware [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1288.166113] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac5db03-8388-4a8e-8265-e875e48da700 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.176266] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780722, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.183222] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194f7f78-dd1b-4fe0-808e-f32aac976c4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.215282] env[62525]: DEBUG nova.network.neutron [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Updating instance_info_cache with network_info: [{"id": "40988ca1-f187-490a-9770-d08a56e6b866", "address": "fa:16:3e:b2:95:d9", "network": {"id": "3e09033e-e0df-49bc-8c34-96574b528391", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-511384985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3e97c55cacc4c2eb618db46fae9f5eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40988ca1-f1", "ovs_interfaceid": "40988ca1-f187-490a-9770-d08a56e6b866", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.328976] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960fca75-d3b7-45dd-a420-45e14681edbd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.337647] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84fd744-899a-4b8a-bb01-909c32a6e0a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.374088] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a985393b-2d4e-4b41-a6bc-dd3af8ef5320 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.384096] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eba1dd7-a1f4-4824-a04f-a8dcc5487bce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.400205] env[62525]: DEBUG nova.compute.provider_tree [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1288.443446] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780723, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.473210] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780724, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.86539} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.473210] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1288.473210] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14814c16-6713-4c0d-b813-3f0cef54d566 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.491859] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.491859] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.491859] env[62525]: DEBUG nova.network.neutron [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1288.502489] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6/9a7bfafe-8598-4c6f-9714-0567fcbb8ea6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1288.503241] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9198e290-7b51-4dd5-8300-a7879f25ecbc {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.527841] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1288.527841] env[62525]: value = "task-1780725" [ 1288.527841] env[62525]: _type = "Task" [ 1288.527841] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.536557] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.563531] env[62525]: DEBUG nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1288.659958] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780722, 'name': CreateVM_Task, 'duration_secs': 1.589572} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.660207] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1288.660903] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.661038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.661504] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1288.661581] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f3f0c50-3729-443d-b4c2-c195ed1892d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.667084] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1288.667084] 
env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52540151-641e-3ec6-7c45-92586cfc82ac" [ 1288.667084] env[62525]: _type = "Task" [ 1288.667084] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.675759] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52540151-641e-3ec6-7c45-92586cfc82ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.720245] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Releasing lock "refresh_cache-8c6e22d6-353f-4be5-8400-7fe38a9bee25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.721024] env[62525]: DEBUG nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Instance network_info: |[{"id": "40988ca1-f187-490a-9770-d08a56e6b866", "address": "fa:16:3e:b2:95:d9", "network": {"id": "3e09033e-e0df-49bc-8c34-96574b528391", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-511384985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3e97c55cacc4c2eb618db46fae9f5eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40988ca1-f1", "ovs_interfaceid": "40988ca1-f187-490a-9770-d08a56e6b866", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1288.721436] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:95:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40988ca1-f187-490a-9770-d08a56e6b866', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1288.732483] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Creating folder: Project 
(f3e97c55cacc4c2eb618db46fae9f5eb). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1288.733431] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-104553d5-1c66-4fb2-8406-77dcf5cc6616 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.744220] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Created folder: Project (f3e97c55cacc4c2eb618db46fae9f5eb) in parent group-v369553. [ 1288.745279] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Creating folder: Instances. Parent ref: group-v369575. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1288.745279] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33406ba6-d29d-4010-9554-ae7ec9f03759 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.757978] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Created folder: Instances in parent group-v369575. [ 1288.757978] env[62525]: DEBUG oslo.service.loopingcall [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1288.758154] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1288.758511] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b798634c-e284-46d4-8543-c90c2940e9f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.781924] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1288.781924] env[62525]: value = "task-1780728" [ 1288.781924] env[62525]: _type = "Task" [ 1288.781924] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.792729] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780728, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.942961] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780723, 'name': PowerOffVM_Task, 'duration_secs': 1.105599} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.943968] env[62525]: DEBUG nova.scheduler.client.report [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1288.944959] env[62525]: DEBUG nova.compute.provider_tree [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 17 to 18 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1288.944959] env[62525]: DEBUG nova.compute.provider_tree [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1288.949156] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1288.949388] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1288.950970] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cc85bf3-2608-4c84-a2c0-6584b0f30515 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.026516] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1289.026761] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1289.026969] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Deleting the datastore file [datastore1] 5bffec39-0b09-49a0-a862-560720db45cd {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.027318] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9033c3a-0291-4816-92d2-a3867e357416 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.049061] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780725, 'name': ReconfigVM_Task, 'duration_secs': 0.305046} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.050030] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for the task: (returnval){ [ 1289.050030] env[62525]: value = "task-1780730" [ 1289.050030] env[62525]: _type = "Task" [ 1289.050030] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.050030] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6/9a7bfafe-8598-4c6f-9714-0567fcbb8ea6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1289.050922] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e7631e4-8253-414e-aa8f-82b8c5fd0613 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.057351] env[62525]: DEBUG nova.network.neutron [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1289.076990] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780730, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.082395] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1289.082395] env[62525]: value = "task-1780731" [ 1289.082395] env[62525]: _type = "Task" [ 1289.082395] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.106881] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780731, 'name': Rename_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.109082] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.183227] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52540151-641e-3ec6-7c45-92586cfc82ac, 'name': SearchDatastore_Task, 'duration_secs': 0.025233} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.183227] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.183227] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1289.183227] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.183528] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.183528] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1289.183528] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6577a4af-b1ef-4727-a705-f49948ecbd5c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.202308] env[62525]: DEBUG nova.compute.manager [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Received event network-vif-plugged-40988ca1-f187-490a-9770-d08a56e6b866 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1289.202550] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Acquiring lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.202742] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.202913] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.204070] env[62525]: DEBUG nova.compute.manager [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] No waiting events found dispatching network-vif-plugged-40988ca1-f187-490a-9770-d08a56e6b866 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1289.204690] env[62525]: WARNING nova.compute.manager [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Received unexpected event network-vif-plugged-40988ca1-f187-490a-9770-d08a56e6b866 for instance with vm_state building and task_state spawning. 
[ 1289.204690] env[62525]: DEBUG nova.compute.manager [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Received event network-changed-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1289.204690] env[62525]: DEBUG nova.compute.manager [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Refreshing instance network info cache due to event network-changed-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1289.204690] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Acquiring lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.204922] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Acquired lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.205453] env[62525]: DEBUG nova.network.neutron [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Refreshing network info cache for port 4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1289.209567] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1289.209823] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1289.210907] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe68793-4700-43b2-bea4-04e9630d57cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.220270] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1289.220270] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ce6ad0-36bc-b57e-750a-406df142ccee" [ 1289.220270] env[62525]: _type = "Task" [ 1289.220270] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.233206] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ce6ad0-36bc-b57e-750a-406df142ccee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.302932] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780728, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.307055] env[62525]: DEBUG nova.network.neutron [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Updating instance_info_cache with network_info: [{"id": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "address": "fa:16:3e:9a:97:9c", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce676e95-1f", "ovs_interfaceid": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.454674] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.362s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.455402] env[62525]: DEBUG nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1289.458698] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.106s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.458698] env[62525]: DEBUG nova.objects.instance [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lazy-loading 'resources' on Instance uuid 5c9ca73a-bc48-4a75-89c8-03def719e488 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1289.562354] env[62525]: DEBUG oslo_vmware.api [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Task: {'id': task-1780730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193329} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.562620] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.562780] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.562953] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.563139] env[62525]: INFO nova.compute.manager [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Took 2.16 seconds to destroy the instance on the hypervisor. [ 1289.563385] env[62525]: DEBUG oslo.service.loopingcall [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1289.563570] env[62525]: DEBUG nova.compute.manager [-] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1289.563678] env[62525]: DEBUG nova.network.neutron [-] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1289.602036] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780731, 'name': Rename_Task, 'duration_secs': 0.184878} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.602036] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1289.602225] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95525dbc-d8df-4162-ad2a-6720dd1e683c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.611020] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1289.611020] env[62525]: value = "task-1780732" [ 1289.611020] env[62525]: _type = "Task" [ 1289.611020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.622094] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780732, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.732622] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ce6ad0-36bc-b57e-750a-406df142ccee, 'name': SearchDatastore_Task, 'duration_secs': 0.013037} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.732908] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7fc80c3-9667-4c7d-8a8d-140e73710330 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.739993] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1289.739993] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a94932-0362-9932-019c-a0a425f338eb" [ 1289.739993] env[62525]: _type = "Task" [ 1289.739993] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.752436] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a94932-0362-9932-019c-a0a425f338eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.797902] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780728, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.814032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.814032] env[62525]: DEBUG nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Instance network_info: |[{"id": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "address": "fa:16:3e:9a:97:9c", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce676e95-1f", "ovs_interfaceid": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1289.814299] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:97:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce676e95-1fd5-4abf-9228-aa35cc8606e6', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1289.824608] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Creating folder: Project (557d744dc35943aab165225698db81bf). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1289.825563] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c173037d-ec2b-4ed2-a864-a4fc22edadc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.838211] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Created folder: Project (557d744dc35943aab165225698db81bf) in parent group-v369553. [ 1289.838532] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Creating folder: Instances. Parent ref: group-v369578. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1289.838667] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-135740b7-8043-4db2-a85e-a33343839048 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.846206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "f93669f2-c59d-4f3f-85a2-a60d714326ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.846342] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.849649] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Created folder: Instances in parent group-v369578. 
[ 1289.849914] env[62525]: DEBUG oslo.service.loopingcall [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1289.850117] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1289.850321] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19d9ac90-9ae6-4175-b27a-75a2ac6e00a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.877328] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1289.877328] env[62525]: value = "task-1780735" [ 1289.877328] env[62525]: _type = "Task" [ 1289.877328] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.887420] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780735, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.963794] env[62525]: DEBUG nova.compute.utils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1289.969833] env[62525]: DEBUG nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1289.969833] env[62525]: DEBUG nova.network.neutron [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1289.989124] env[62525]: DEBUG nova.compute.manager [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Received event network-vif-plugged-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1289.989346] env[62525]: DEBUG oslo_concurrency.lockutils [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] Acquiring lock "deef59c8-f710-434d-bddc-f63bb3d518b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.989551] env[62525]: DEBUG oslo_concurrency.lockutils [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.989756] env[62525]: DEBUG oslo_concurrency.lockutils [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.989901] env[62525]: DEBUG nova.compute.manager [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] No waiting events found dispatching network-vif-plugged-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1289.990078] env[62525]: WARNING nova.compute.manager [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Received unexpected event network-vif-plugged-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 for instance with vm_state building and task_state spawning. [ 1289.990229] env[62525]: DEBUG nova.compute.manager [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Received event network-changed-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1289.990434] env[62525]: DEBUG nova.compute.manager [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Refreshing instance network info cache due to event network-changed-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1289.990554] env[62525]: DEBUG oslo_concurrency.lockutils [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] Acquiring lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1289.991380] env[62525]: DEBUG oslo_concurrency.lockutils [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] Acquired lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.991380] env[62525]: DEBUG nova.network.neutron [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Refreshing network info cache for port bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.020757] env[62525]: DEBUG nova.policy [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b254a8715dfc48a7bab5ad253e09507c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '557d744dc35943aab165225698db81bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1290.083867] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "7f8392fa-1c11-4180-bda9-057b5cfa058c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.084214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "7f8392fa-1c11-4180-bda9-057b5cfa058c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.124548] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780732, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.236723] env[62525]: DEBUG nova.network.neutron [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updated VIF entry in instance network info cache for port 4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.236723] env[62525]: DEBUG nova.network.neutron [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updating instance_info_cache with network_info: [{"id": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "address": "fa:16:3e:67:c6:a3", "network": {"id": "5540caf8-99d8-46af-9d9a-9a73ba235686", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-477507757-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689156401b324ce3a5021a6079c5b27b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eeddc3f-2e", "ovs_interfaceid": "4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.261106] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a94932-0362-9932-019c-a0a425f338eb, 'name': SearchDatastore_Task, 'duration_secs': 0.029181} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.261285] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.261791] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] deef59c8-f710-434d-bddc-f63bb3d518b1/deef59c8-f710-434d-bddc-f63bb3d518b1.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1290.262240] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f49cace-a4d7-457d-9a2b-1813fc49ef14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.276267] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1290.276267] env[62525]: value = "task-1780736" [ 1290.276267] env[62525]: _type = "Task" [ 1290.276267] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.285307] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780736, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.286807] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9e53fa-b17c-4f77-a80c-5369d86bc230 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.299950] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b24e25f-a490-4751-b94b-46d7c3563534 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.303427] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780728, 'name': CreateVM_Task, 'duration_secs': 1.488287} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.303880] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1290.305417] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.305417] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.305417] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1290.305417] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a81bce6-d3b2-421d-acb5-4e4ec66956ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.334142] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1150b3b6-47a2-4d6f-b9a2-0832efd0d658 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.338376] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1290.338376] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dd6d0b-292f-a169-edb7-ad43b0edcb44" [ 1290.338376] env[62525]: _type = "Task" [ 1290.338376] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.345062] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e564cba5-aa27-4ebb-bd32-33f71524a6f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.352122] env[62525]: DEBUG nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1290.355077] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dd6d0b-292f-a169-edb7-ad43b0edcb44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.366009] env[62525]: DEBUG nova.compute.provider_tree [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1290.388844] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780735, 'name': CreateVM_Task, 'duration_secs': 0.460739} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.388844] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1290.389150] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.409861] env[62525]: DEBUG nova.network.neutron [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Successfully created port: 4a5c70c1-3224-4684-a897-549395a4ae84 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1290.469592] env[62525]: DEBUG nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1290.572454] env[62525]: DEBUG nova.network.neutron [-] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.591789] env[62525]: DEBUG nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1290.621016] env[62525]: DEBUG oslo_vmware.api [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780732, 'name': PowerOnVM_Task, 'duration_secs': 0.673177} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.621298] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1290.621495] env[62525]: INFO nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Took 11.93 seconds to spawn the instance on the hypervisor. [ 1290.621672] env[62525]: DEBUG nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1290.622697] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ba5a12-a125-4782-8a41-ea9aeb6dbb55 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.742164] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Releasing lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.742472] env[62525]: DEBUG nova.compute.manager [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Received event network-changed-40988ca1-f187-490a-9770-d08a56e6b866 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1290.742739] env[62525]: DEBUG nova.compute.manager [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Refreshing instance network info cache due to event network-changed-40988ca1-f187-490a-9770-d08a56e6b866. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1290.742919] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Acquiring lock "refresh_cache-8c6e22d6-353f-4be5-8400-7fe38a9bee25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.743074] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Acquired lock "refresh_cache-8c6e22d6-353f-4be5-8400-7fe38a9bee25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.743236] env[62525]: DEBUG nova.network.neutron [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Refreshing network info cache for port 40988ca1-f187-490a-9770-d08a56e6b866 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.790850] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780736, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.853051] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dd6d0b-292f-a169-edb7-ad43b0edcb44, 'name': SearchDatastore_Task, 'duration_secs': 0.019725} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.853799] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.854190] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1290.854837] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.855262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.855573] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1290.856238] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.859012] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1290.861156] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5df6a43-3d48-4bce-8650-1e75b55ec7b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.871976] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9987d23-527a-4fbc-8f4c-6ff3ebd8fa23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.881657] env[62525]: DEBUG 
oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1290.881657] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cdddd1-04e0-2321-b26f-9b3fe8959476" [ 1290.881657] env[62525]: _type = "Task" [ 1290.881657] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.883017] env[62525]: DEBUG nova.network.neutron [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Updated VIF entry in instance network info cache for port bb0ccd32-fa3c-4e68-98dc-c81a3f541a88. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.883017] env[62525]: DEBUG nova.network.neutron [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Updating instance_info_cache with network_info: [{"id": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "address": "fa:16:3e:73:35:0d", "network": {"id": "6d708e5d-ee5e-4669-a392-953aae2a8af9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1717084220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d45bea689ee84b988655da8fff7c4546", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb0ccd32-fa", "ovs_interfaceid": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.890950] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1290.891982] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1290.895289] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d3bb7cb-011c-428a-ba4c-5c6b7849560d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.899355] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cdddd1-04e0-2321-b26f-9b3fe8959476, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.900437] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.903527] env[62525]: ERROR nova.scheduler.client.report [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] [req-9ebde468-7ea6-42e0-a6c9-aaf8963f6736] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9ebde468-7ea6-42e0-a6c9-aaf8963f6736"}]} [ 1290.907645] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1290.907645] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5271b9ab-d63f-e22b-c57a-d98a3a8e71a6" [ 1290.907645] env[62525]: _type = "Task" [ 1290.907645] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.920106] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5271b9ab-d63f-e22b-c57a-d98a3a8e71a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.926488] env[62525]: DEBUG nova.scheduler.client.report [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1290.951387] env[62525]: DEBUG nova.scheduler.client.report [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1290.951653] env[62525]: DEBUG nova.compute.provider_tree [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1290.970193] env[62525]: DEBUG nova.scheduler.client.report [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1291.000832] env[62525]: DEBUG nova.scheduler.client.report [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1291.075156] env[62525]: INFO nova.compute.manager [-] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Took 1.51 seconds to deallocate network for instance. 
[ 1291.129548] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.146203] env[62525]: INFO nova.compute.manager [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Took 17.23 seconds to build instance. [ 1291.288949] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.879458} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.292071] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] deef59c8-f710-434d-bddc-f63bb3d518b1/deef59c8-f710-434d-bddc-f63bb3d518b1.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1291.292071] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1291.293192] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa3f860d-9d76-4224-ae30-f9100b544be3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.300299] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1291.300299] env[62525]: value = "task-1780737" [ 1291.300299] env[62525]: _type = "Task" [ 1291.300299] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.309954] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6480dc8c-3df1-44c9-8eb4-e14a73e448cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.321682] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780737, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.321682] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19e7e6f-93f9-4125-b0ef-caa088b05a18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.354367] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8fc7d3-35dc-43fb-84f5-36ab7d0ff75a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.362523] env[62525]: DEBUG nova.network.neutron [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Successfully updated port: 42ec407b-c27a-4d4f-9c35-6c5a65f5db02 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1291.365570] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5c09bb-60e0-4cac-9fb8-3c970c50e0e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.382144] env[62525]: DEBUG nova.compute.provider_tree [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.389026] env[62525]: DEBUG oslo_concurrency.lockutils [req-25ec1434-f36b-4380-a2c0-f0a0962b16e1 req-a8a4ca6b-755b-4325-848f-8fcb50460338 service nova] Releasing lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.402105] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cdddd1-04e0-2321-b26f-9b3fe8959476, 'name': SearchDatastore_Task, 'duration_secs': 0.065707} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.403881] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.403881] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1291.403881] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.418981] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5271b9ab-d63f-e22b-c57a-d98a3a8e71a6, 'name': SearchDatastore_Task, 'duration_secs': 0.079972} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.420096] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-701ca4e5-66f7-45d5-8dbd-548c941b27ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.428127] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1291.428127] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dfbccd-429e-77aa-27cd-77731c0c92d6" [ 1291.428127] env[62525]: _type = "Task" [ 1291.428127] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.437344] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dfbccd-429e-77aa-27cd-77731c0c92d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.484296] env[62525]: DEBUG nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1291.539960] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1291.540308] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1291.540571] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1291.540827] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1291.541023] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1291.541228] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1291.542949] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1291.542949] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1291.542949] 
env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1291.542949] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1291.542949] env[62525]: DEBUG nova.virt.hardware [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1291.543952] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4e6ff3-a6d0-4e0b-b1eb-5f2d7ea18419 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.558119] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cda41ca-8f4a-45f8-a815-00852d149484 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.584557] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.647118] env[62525]: DEBUG nova.network.neutron [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Updated VIF entry in instance network info cache for port 40988ca1-f187-490a-9770-d08a56e6b866. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1291.647118] env[62525]: DEBUG nova.network.neutron [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Updating instance_info_cache with network_info: [{"id": "40988ca1-f187-490a-9770-d08a56e6b866", "address": "fa:16:3e:b2:95:d9", "network": {"id": "3e09033e-e0df-49bc-8c34-96574b528391", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-511384985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3e97c55cacc4c2eb618db46fae9f5eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40988ca1-f1", "ovs_interfaceid": "40988ca1-f187-490a-9770-d08a56e6b866", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.649057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ccc3230-50e5-40ab-8abe-d7ad77dce542 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.758s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.812356] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131582} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.812642] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1291.813579] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2290cad0-fa83-4ea6-8f9a-007f7c0d9019 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.839822] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] deef59c8-f710-434d-bddc-f63bb3d518b1/deef59c8-f710-434d-bddc-f63bb3d518b1.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1291.840169] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf540f7e-dbf7-4636-a58b-3d71fa1b34ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.863418] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1291.863418] env[62525]: value = "task-1780738" [ 1291.863418] env[62525]: _type = "Task" [ 1291.863418] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.873189] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "refresh_cache-0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.873402] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "refresh_cache-0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.873739] env[62525]: DEBUG nova.network.neutron [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1291.874970] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780738, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.890550] env[62525]: DEBUG nova.scheduler.client.report [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1291.939800] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dfbccd-429e-77aa-27cd-77731c0c92d6, 'name': SearchDatastore_Task, 'duration_secs': 0.011625} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.939800] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.939800] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8c6e22d6-353f-4be5-8400-7fe38a9bee25/8c6e22d6-353f-4be5-8400-7fe38a9bee25.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1291.939962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.940126] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1291.940334] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f9e11fa-ea75-44b6-87ae-29dca7c1ede7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.942907] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97291b06-f16b-4467-b645-9ab152ccb522 {{(pid=62525) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.949688] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1291.949688] env[62525]: value = "task-1780739" [ 1291.949688] env[62525]: _type = "Task" [ 1291.949688] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.955477] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1291.955477] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1291.955971] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f145cd30-d829-46f0-b50c-a5fd534d96ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.962082] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.965545] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1291.965545] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52118add-37b9-1dc0-3958-d17be5d673a0" [ 1291.965545] env[62525]: _type = "Task" [ 1291.965545] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.974190] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52118add-37b9-1dc0-3958-d17be5d673a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.148525] env[62525]: DEBUG oslo_concurrency.lockutils [req-07f185fe-244f-49b6-95b9-fd9faf75ecf0 req-3b79b4ef-13a8-4f17-8c76-512f2ae472a4 service nova] Releasing lock "refresh_cache-8c6e22d6-353f-4be5-8400-7fe38a9bee25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1292.193057] env[62525]: DEBUG nova.network.neutron [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Successfully updated port: 4a5c70c1-3224-4684-a897-549395a4ae84 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1292.374083] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780738, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.402606] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.942s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.403511] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.369s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.406472] env[62525]: INFO nova.compute.claims [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1292.423057] env[62525]: DEBUG nova.network.neutron [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1292.449334] env[62525]: INFO nova.scheduler.client.report [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Deleted allocations for instance 5c9ca73a-bc48-4a75-89c8-03def719e488 [ 1292.462803] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780739, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.479587] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52118add-37b9-1dc0-3958-d17be5d673a0, 'name': SearchDatastore_Task, 'duration_secs': 0.064606} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.481867] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d04d9943-b199-45f8-9766-f98c2ff82f20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.490026] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1292.490026] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5268e818-d4bd-28e6-9478-00e70382d006" [ 1292.490026] env[62525]: _type = "Task" [ 1292.490026] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.511328] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5268e818-d4bd-28e6-9478-00e70382d006, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.634237] env[62525]: DEBUG nova.network.neutron [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Updating instance_info_cache with network_info: [{"id": "42ec407b-c27a-4d4f-9c35-6c5a65f5db02", "address": "fa:16:3e:09:7a:7b", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42ec407b-c2", "ovs_interfaceid": "42ec407b-c27a-4d4f-9c35-6c5a65f5db02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.695993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 
tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "refresh_cache-82ea280a-4e1b-4fac-a634-7f79ce731564" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.696705] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "refresh_cache-82ea280a-4e1b-4fac-a634-7f79ce731564" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.696705] env[62525]: DEBUG nova.network.neutron [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1292.851126] env[62525]: DEBUG nova.compute.manager [req-c978c96c-1344-47f4-b825-40b9f366c480 req-33f5e30d-ad37-4127-8e19-cdc9b5dca8be service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Received event network-vif-plugged-42ec407b-c27a-4d4f-9c35-6c5a65f5db02 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1292.852120] env[62525]: DEBUG oslo_concurrency.lockutils [req-c978c96c-1344-47f4-b825-40b9f366c480 req-33f5e30d-ad37-4127-8e19-cdc9b5dca8be service nova] Acquiring lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.852120] env[62525]: DEBUG oslo_concurrency.lockutils [req-c978c96c-1344-47f4-b825-40b9f366c480 req-33f5e30d-ad37-4127-8e19-cdc9b5dca8be service nova] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.852120] env[62525]: DEBUG oslo_concurrency.lockutils [req-c978c96c-1344-47f4-b825-40b9f366c480 req-33f5e30d-ad37-4127-8e19-cdc9b5dca8be service nova] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.852120] env[62525]: DEBUG nova.compute.manager [req-c978c96c-1344-47f4-b825-40b9f366c480 req-33f5e30d-ad37-4127-8e19-cdc9b5dca8be service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] No waiting events found dispatching network-vif-plugged-42ec407b-c27a-4d4f-9c35-6c5a65f5db02 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1292.852120] env[62525]: WARNING nova.compute.manager [req-c978c96c-1344-47f4-b825-40b9f366c480 req-33f5e30d-ad37-4127-8e19-cdc9b5dca8be service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Received unexpected event network-vif-plugged-42ec407b-c27a-4d4f-9c35-6c5a65f5db02 for instance with vm_state building and task_state spawning. 
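
The completed-task entries scattered through this log (ReconfigVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, CreateVM_Task, and so on) all share the same "Task: {...} completed successfully." shape, so their names and durations can be pulled out with a short, self-contained Python sketch. This is only an illustrative helper for reading the log above, not code from Nova or oslo.vmware, and the regex assumes the exact key order and wording seen in these lines.

import re
import sys

# Matches completed-task lines of the form seen in this log, e.g.:
#   Task: {'id': task-1780739, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.137254} completed successfully.
# Task ids may also be session-scoped strings such as
#   session[52912505-...]52dfbccd-..., which '[^,]+' still captures.
TASK_RE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[0-9.]+)\} completed successfully"
)

def completed_tasks(lines):
    """Yield (task_id, task_name, duration_secs) for each completed-task entry."""
    for line in lines:
        m = TASK_RE.search(line)
        if m:
            yield m.group("id"), m.group("name"), float(m.group("secs"))

if __name__ == "__main__":
    # Read the captured log from stdin and print one row per completed task.
    for task_id, name, secs in completed_tasks(sys.stdin):
        print(f"{name:<25} {secs:>8.3f}s  ({task_id})")

Piping the captured compute log through this script gives a quick per-task timing summary (for example, it shows the CopyVirtualDisk_Task in this section taking roughly two seconds while the SearchDatastore_Task calls finish in well under a tenth of a second), which is often enough to spot where a slow spawn is spending its time.
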
[ 1292.878534] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780738, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.965669] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b0329bbc-51b8-4b46-983b-219c9c7c65a1 tempest-ServerDiagnosticsTest-775835216 tempest-ServerDiagnosticsTest-775835216-project-member] Lock "5c9ca73a-bc48-4a75-89c8-03def719e488" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.864s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.969144] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.000795] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5268e818-d4bd-28e6-9478-00e70382d006, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.041978] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.042247] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.116616] env[62525]: DEBUG nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Received event network-vif-plugged-ce676e95-1fd5-4abf-9228-aa35cc8606e6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1293.116945] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Acquiring lock "98334a1b-1a73-408f-93a4-6dc72764ebfc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.118018] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Lock 
"98334a1b-1a73-408f-93a4-6dc72764ebfc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.118018] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.118018] env[62525]: DEBUG nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] No waiting events found dispatching network-vif-plugged-ce676e95-1fd5-4abf-9228-aa35cc8606e6 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1293.118018] env[62525]: WARNING nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Received unexpected event network-vif-plugged-ce676e95-1fd5-4abf-9228-aa35cc8606e6 for instance with vm_state building and task_state spawning. [ 1293.118266] env[62525]: DEBUG nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Received event network-changed-ce676e95-1fd5-4abf-9228-aa35cc8606e6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1293.118433] env[62525]: DEBUG nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Refreshing instance network info cache due to event network-changed-ce676e95-1fd5-4abf-9228-aa35cc8606e6. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1293.118659] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Acquiring lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.118816] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Acquired lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.118992] env[62525]: DEBUG nova.network.neutron [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Refreshing network info cache for port ce676e95-1fd5-4abf-9228-aa35cc8606e6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1293.139543] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "refresh_cache-0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.139823] env[62525]: DEBUG nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Instance network_info: |[{"id": "42ec407b-c27a-4d4f-9c35-6c5a65f5db02", "address": "fa:16:3e:09:7a:7b", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42ec407b-c2", "ovs_interfaceid": "42ec407b-c27a-4d4f-9c35-6c5a65f5db02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1293.140283] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:7a:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'42ec407b-c27a-4d4f-9c35-6c5a65f5db02', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1293.152599] env[62525]: DEBUG oslo.service.loopingcall [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.153395] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1293.154503] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b31536f9-e71b-41cf-aed4-b4f56ff03289 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.189254] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1293.189254] env[62525]: value = "task-1780740" [ 1293.189254] env[62525]: _type = "Task" [ 1293.189254] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.201740] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780740, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.236140] env[62525]: DEBUG nova.network.neutron [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1293.313629] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.314274] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.374204] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780738, 'name': ReconfigVM_Task, 'duration_secs': 1.030643} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.374389] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Reconfigured VM instance instance-00000007 to attach disk [datastore1] deef59c8-f710-434d-bddc-f63bb3d518b1/deef59c8-f710-434d-bddc-f63bb3d518b1.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1293.375009] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df1f1096-c176-40f3-b39e-f34fa036d0e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.382212] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1293.382212] env[62525]: value = "task-1780741" [ 1293.382212] env[62525]: _type = "Task" [ 1293.382212] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.393308] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780741, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.420888] env[62525]: DEBUG nova.network.neutron [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Updating instance_info_cache with network_info: [{"id": "4a5c70c1-3224-4684-a897-549395a4ae84", "address": "fa:16:3e:d3:16:24", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5c70c1-32", "ovs_interfaceid": "4a5c70c1-3224-4684-a897-549395a4ae84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.466026] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780739, 'name': 
CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.502020] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5268e818-d4bd-28e6-9478-00e70382d006, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.544877] env[62525]: DEBUG nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1293.702867] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780740, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.736994] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94b24b1-ec4f-4b00-8379-f59b1823a2ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.749480] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef7caa9-fff1-404d-a5d6-3b5a3c8f6b1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.787733] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973952bb-7303-48f8-ae83-a7529fb9cb95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.795616] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee4edbc-87ee-47f8-8c79-ffe1b9423ef9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.818881] env[62525]: DEBUG nova.compute.provider_tree [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.897181] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780741, 'name': Rename_Task, 'duration_secs': 0.275117} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.897493] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1293.897739] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb7a38ca-77b3-4fa1-be51-b379dadebb41 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.907268] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1293.907268] env[62525]: value = "task-1780742" [ 1293.907268] env[62525]: _type = "Task" [ 1293.907268] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.918197] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.924085] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "refresh_cache-82ea280a-4e1b-4fac-a634-7f79ce731564" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.924085] env[62525]: DEBUG nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Instance network_info: |[{"id": "4a5c70c1-3224-4684-a897-549395a4ae84", "address": "fa:16:3e:d3:16:24", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5c70c1-32", "ovs_interfaceid": "4a5c70c1-3224-4684-a897-549395a4ae84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1293.924362] env[62525]: DEBUG 
nova.virt.vmwareapi.vmops [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:16:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a5c70c1-3224-4684-a897-549395a4ae84', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1293.932626] env[62525]: DEBUG oslo.service.loopingcall [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.933086] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1293.933177] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-754f38ed-1fca-421d-92c1-3f232ce25bc6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.960513] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1293.960513] env[62525]: value = "task-1780743" [ 1293.960513] env[62525]: _type = "Task" [ 1293.960513] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.968452] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780739, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.975278] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780743, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.000740] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5268e818-d4bd-28e6-9478-00e70382d006, 'name': SearchDatastore_Task, 'duration_secs': 1.082338} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.003450] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.004409] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 98334a1b-1a73-408f-93a4-6dc72764ebfc/98334a1b-1a73-408f-93a4-6dc72764ebfc.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1294.004727] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdf8075b-ab10-43e8-9a87-011cda699ccd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.011680] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1294.011680] env[62525]: value = "task-1780744" [ 1294.011680] env[62525]: _type = "Task" [ 1294.011680] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.023422] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780744, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.066506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.079358] env[62525]: DEBUG nova.network.neutron [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Updated VIF entry in instance network info cache for port ce676e95-1fd5-4abf-9228-aa35cc8606e6. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1294.079782] env[62525]: DEBUG nova.network.neutron [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Updating instance_info_cache with network_info: [{"id": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "address": "fa:16:3e:9a:97:9c", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce676e95-1f", "ovs_interfaceid": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.198916] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780740, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.246429] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.246429] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.323766] env[62525]: DEBUG nova.scheduler.client.report [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1294.422471] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780742, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.466530] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780739, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.137254} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.469548] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8c6e22d6-353f-4be5-8400-7fe38a9bee25/8c6e22d6-353f-4be5-8400-7fe38a9bee25.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1294.469857] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1294.470215] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad2b6ed0-0f87-4692-97cf-dac5c43a8bf6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.479776] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780743, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.481905] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1294.481905] env[62525]: value = "task-1780745" [ 1294.481905] env[62525]: _type = "Task" [ 1294.481905] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.491466] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780745, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.523550] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780744, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.582970] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Releasing lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.582970] env[62525]: DEBUG nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received event network-changed-9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1294.582970] env[62525]: DEBUG nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Refreshing instance network info cache due to event network-changed-9c337d27-bc69-4787-a533-f523faa8aa10. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1294.583197] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Acquiring lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.584033] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Acquired lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.584033] env[62525]: DEBUG nova.network.neutron [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Refreshing network info cache for port 9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1294.701782] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780740, 'name': CreateVM_Task, 'duration_secs': 1.051112} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.702897] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1294.702897] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.703656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.704267] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1294.704665] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16c60bed-b1a6-48c4-84ea-225428264bb2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.710440] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1294.710440] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52837677-0f88-c596-848a-8c36b771c2dd" [ 1294.710440] env[62525]: _type = "Task" [ 1294.710440] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.721163] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52837677-0f88-c596-848a-8c36b771c2dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.830550] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.831131] env[62525]: DEBUG nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1294.833737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.725s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.835207] env[62525]: INFO nova.compute.claims [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1294.926229] env[62525]: DEBUG oslo_vmware.api [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1780742, 'name': PowerOnVM_Task, 'duration_secs': 0.785215} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.926229] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1294.926229] env[62525]: INFO nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Took 13.84 seconds to spawn the instance on the hypervisor. [ 1294.926229] env[62525]: DEBUG nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1294.926229] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e3c33f-520e-4519-b49d-9aba1f18f184 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.976231] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780743, 'name': CreateVM_Task, 'duration_secs': 0.536239} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.976443] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1294.977201] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.993737] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780745, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082147} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.994041] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1294.995150] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f2ac47-0250-428c-af0a-9dbe7788d5e9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.023860] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 8c6e22d6-353f-4be5-8400-7fe38a9bee25/8c6e22d6-353f-4be5-8400-7fe38a9bee25.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.027893] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c775104b-43af-475b-832e-ea99ba73720b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.051344] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780744, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621017} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.053455] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 98334a1b-1a73-408f-93a4-6dc72764ebfc/98334a1b-1a73-408f-93a4-6dc72764ebfc.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1295.053455] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1295.053455] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1295.053455] env[62525]: value = "task-1780746" [ 1295.053455] env[62525]: _type = "Task" [ 1295.053455] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.053722] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-facbc519-3024-401a-988a-7c006dc38168 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.064569] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.066487] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1295.066487] env[62525]: value = "task-1780747" [ 1295.066487] env[62525]: _type = "Task" [ 1295.066487] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.075486] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780747, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.193027] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.193085] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.228201] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52837677-0f88-c596-848a-8c36b771c2dd, 'name': SearchDatastore_Task, 'duration_secs': 0.031253} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.228201] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.230427] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1295.230427] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.230427] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.230427] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1295.230632] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.230632] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1295.230632] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d270a6e3-5491-4dd1-ba41-e45c2f9e69f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.232352] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cad262a-12d0-428b-ad5e-71f742b98008 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.241686] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1295.241686] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5205e68d-6493-7ebf-890d-03d2dc749f18" [ 1295.241686] env[62525]: _type = "Task" [ 1295.241686] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.252663] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1295.252891] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1295.253715] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5205e68d-6493-7ebf-890d-03d2dc749f18, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.253982] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8e62769-de90-4fb7-be1c-8cfa79e0c4e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.260560] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1295.260560] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522ba2a0-4048-675e-e6a9-d7819abee0b9" [ 1295.260560] env[62525]: _type = "Task" [ 1295.260560] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.269973] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522ba2a0-4048-675e-e6a9-d7819abee0b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.287034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "8b41bff7-137f-489c-bb88-7487eb8e97cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.287463] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "8b41bff7-137f-489c-bb88-7487eb8e97cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.344087] env[62525]: DEBUG nova.compute.utils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1295.345978] env[62525]: DEBUG nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1295.345978] env[62525]: DEBUG nova.network.neutron [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1295.447353] env[62525]: INFO nova.compute.manager [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Took 20.20 seconds to build instance. [ 1295.567771] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.578600] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780747, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.384998} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.578600] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1295.578600] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2059ae14-4c12-45aa-b794-219ed04ac26a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.601957] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 98334a1b-1a73-408f-93a4-6dc72764ebfc/98334a1b-1a73-408f-93a4-6dc72764ebfc.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.604026] env[62525]: DEBUG nova.policy [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b2a3e9006c44ebabc5a73be540b9045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4685480cae574a5daac6a1f077a8c319', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1295.605678] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-59ac630e-a5ca-4581-bfe6-3416b92c4880 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.628791] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1295.628791] env[62525]: value = "task-1780752" [ 1295.628791] env[62525]: _type = "Task" [ 1295.628791] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.637617] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780752, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.706036] env[62525]: DEBUG nova.network.neutron [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updated VIF entry in instance network info cache for port 9c337d27-bc69-4787-a533-f523faa8aa10. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1295.706258] env[62525]: DEBUG nova.network.neutron [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.751961] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5205e68d-6493-7ebf-890d-03d2dc749f18, 'name': SearchDatastore_Task, 'duration_secs': 0.032069} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.752427] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.752707] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1295.753187] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.772334] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522ba2a0-4048-675e-e6a9-d7819abee0b9, 'name': SearchDatastore_Task, 'duration_secs': 0.03898} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.773171] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98eb49b0-d260-4a00-afab-216aa5202d99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.778885] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1295.778885] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5298bd11-aad0-07ea-3e62-0fe03e740595" [ 1295.778885] env[62525]: _type = "Task" [ 1295.778885] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.787729] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5298bd11-aad0-07ea-3e62-0fe03e740595, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.852062] env[62525]: DEBUG nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1295.891661] env[62525]: DEBUG nova.compute.manager [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Received event network-changed-42ec407b-c27a-4d4f-9c35-6c5a65f5db02 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1295.891797] env[62525]: DEBUG nova.compute.manager [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Refreshing instance network info cache due to event network-changed-42ec407b-c27a-4d4f-9c35-6c5a65f5db02. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1295.892439] env[62525]: DEBUG oslo_concurrency.lockutils [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] Acquiring lock "refresh_cache-0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.894653] env[62525]: DEBUG oslo_concurrency.lockutils [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] Acquired lock "refresh_cache-0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.895059] env[62525]: DEBUG nova.network.neutron [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Refreshing network info cache for port 42ec407b-c27a-4d4f-9c35-6c5a65f5db02 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1295.952597] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2b43e1ff-36cd-458a-8b82-69cc689b7a4c tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.724s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.070669] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780746, 'name': ReconfigVM_Task, 'duration_secs': 0.844967} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.071053] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 8c6e22d6-353f-4be5-8400-7fe38a9bee25/8c6e22d6-353f-4be5-8400-7fe38a9bee25.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1296.071595] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-089ca54d-3b23-479e-a630-19d98626d134 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.078168] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1296.078168] env[62525]: value = "task-1780753" [ 1296.078168] env[62525]: _type = "Task" [ 1296.078168] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.084359] env[62525]: DEBUG nova.network.neutron [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Successfully created port: 44ff1acd-1593-43a1-95fd-aceba913d7d5 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1296.091521] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780753, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.140319] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780752, 'name': ReconfigVM_Task, 'duration_secs': 0.434573} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.141324] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 98334a1b-1a73-408f-93a4-6dc72764ebfc/98334a1b-1a73-408f-93a4-6dc72764ebfc.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1296.142767] env[62525]: DEBUG nova.compute.manager [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Received event network-vif-plugged-4a5c70c1-3224-4684-a897-549395a4ae84 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1296.142967] env[62525]: DEBUG oslo_concurrency.lockutils [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] Acquiring lock "82ea280a-4e1b-4fac-a634-7f79ce731564-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.143192] env[62525]: DEBUG oslo_concurrency.lockutils [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.143506] env[62525]: DEBUG oslo_concurrency.lockutils [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.143673] env[62525]: DEBUG nova.compute.manager [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] No waiting events found dispatching network-vif-plugged-4a5c70c1-3224-4684-a897-549395a4ae84 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1296.143815] env[62525]: WARNING nova.compute.manager [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Received unexpected event network-vif-plugged-4a5c70c1-3224-4684-a897-549395a4ae84 for instance with vm_state building and task_state spawning. 
[ 1296.144020] env[62525]: DEBUG nova.compute.manager [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Received event network-changed-4a5c70c1-3224-4684-a897-549395a4ae84 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1296.144364] env[62525]: DEBUG nova.compute.manager [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Refreshing instance network info cache due to event network-changed-4a5c70c1-3224-4684-a897-549395a4ae84. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1296.144411] env[62525]: DEBUG oslo_concurrency.lockutils [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] Acquiring lock "refresh_cache-82ea280a-4e1b-4fac-a634-7f79ce731564" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.144589] env[62525]: DEBUG oslo_concurrency.lockutils [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] Acquired lock "refresh_cache-82ea280a-4e1b-4fac-a634-7f79ce731564" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.144782] env[62525]: DEBUG nova.network.neutron [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Refreshing network info cache for port 4a5c70c1-3224-4684-a897-549395a4ae84 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1296.145956] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46e7f7fe-85a7-42b8-9b11-a2dbd2e5912f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.153229] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1296.153229] env[62525]: value = "task-1780754" [ 1296.153229] env[62525]: _type = "Task" [ 1296.153229] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.169527] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780754, 'name': Rename_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.209795] env[62525]: DEBUG oslo_concurrency.lockutils [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] Releasing lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.209795] env[62525]: DEBUG nova.compute.manager [req-feda0a0b-fcc5-438b-8f30-ab483dfc0be0 req-c2357408-5ad4-4b48-8029-9d869eba5d59 service nova] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Received event network-vif-deleted-4cdbce67-8f63-4bbb-9079-0e1a0038ee8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1296.240951] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a963aa4-81f6-4ae2-9153-29f5e6cc7798 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.250529] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f37c9e1-0692-4441-b4dc-3eebe0d1199f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.291201] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0fee56-1926-41a3-a614-1b12eb1d4eab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.302435] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5298bd11-aad0-07ea-3e62-0fe03e740595, 'name': SearchDatastore_Task, 'duration_secs': 0.017017} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.303757] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a119168-170f-4b3c-bd00-623aac713517 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.308187] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.308425] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6/0e8254af-403d-4f5d-ac58-f3b4efc0c3d6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1296.308762] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.308960] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.309175] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ab10fea-cd4b-4dac-8de6-f06d76886fc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.314018] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fab6870-d341-4e1d-98f3-90a5af0835fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.322262] env[62525]: DEBUG nova.compute.provider_tree [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1296.326945] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1296.326945] env[62525]: value = "task-1780755" [ 1296.326945] env[62525]: _type = "Task" [ 1296.326945] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.336289] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.340014] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.340014] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1296.340014] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ed8456a-bae1-4dbd-8624-54d7cf637f6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.345464] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1296.345464] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52866c74-544d-e98a-54b3-137d11374867" [ 1296.345464] env[62525]: _type = "Task" [ 1296.345464] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.352095] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52866c74-544d-e98a-54b3-137d11374867, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.457712] env[62525]: DEBUG nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1296.590928] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780753, 'name': Rename_Task, 'duration_secs': 0.224047} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.591346] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1296.591688] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39bff4ac-19ac-4207-8e8c-998362119cce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.600563] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1296.600563] env[62525]: value = "task-1780756" [ 1296.600563] env[62525]: _type = "Task" [ 1296.600563] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.610775] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.669969] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780754, 'name': Rename_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.671189] env[62525]: DEBUG nova.network.neutron [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Updated VIF entry in instance network info cache for port 42ec407b-c27a-4d4f-9c35-6c5a65f5db02. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1296.671456] env[62525]: DEBUG nova.network.neutron [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Updating instance_info_cache with network_info: [{"id": "42ec407b-c27a-4d4f-9c35-6c5a65f5db02", "address": "fa:16:3e:09:7a:7b", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42ec407b-c2", "ovs_interfaceid": "42ec407b-c27a-4d4f-9c35-6c5a65f5db02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.827944] env[62525]: DEBUG nova.scheduler.client.report [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1296.842818] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.854785] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52866c74-544d-e98a-54b3-137d11374867, 'name': SearchDatastore_Task, 'duration_secs': 0.011712} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.858209] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f6a7732-b435-47cf-9713-cff83bb6ad10 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.862971] env[62525]: DEBUG nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1296.867319] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1296.867319] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d2a7ce-1a3f-5522-5bdd-0d2edbc41eaa" [ 1296.867319] env[62525]: _type = "Task" [ 1296.867319] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.877238] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d2a7ce-1a3f-5522-5bdd-0d2edbc41eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.893913] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1296.894196] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1296.894364] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1296.894553] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 
tempest-ServersAdminTestJSON-245146123-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1296.894695] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1296.894851] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1296.895190] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1296.895932] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1296.895932] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1296.895932] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1296.896118] env[62525]: DEBUG nova.virt.hardware [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1296.897397] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4841abdf-2323-4725-8f3c-6908f56d6a0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.911191] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f549068-29de-4954-92d2-c85fcd041a99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.987051] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.084073] env[62525]: DEBUG 
nova.network.neutron [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Updated VIF entry in instance network info cache for port 4a5c70c1-3224-4684-a897-549395a4ae84. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1297.084377] env[62525]: DEBUG nova.network.neutron [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Updating instance_info_cache with network_info: [{"id": "4a5c70c1-3224-4684-a897-549395a4ae84", "address": "fa:16:3e:d3:16:24", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5c70c1-32", "ovs_interfaceid": "4a5c70c1-3224-4684-a897-549395a4ae84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.113747] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.168646] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780754, 'name': Rename_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.176078] env[62525]: DEBUG oslo_concurrency.lockutils [req-24c1a250-fe5c-4f48-9fd4-8661a273cfd3 req-8466309e-fb26-487b-b47a-a0c88e693df1 service nova] Releasing lock "refresh_cache-0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.314282] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "d38bbd59-b40c-4965-b823-caefc93e2568" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.314560] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "d38bbd59-b40c-4965-b823-caefc93e2568" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.335634] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.336643] env[62525]: DEBUG nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1297.350018] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.449s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.353126] env[62525]: INFO nova.compute.claims [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1297.366671] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780755, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.380568] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d2a7ce-1a3f-5522-5bdd-0d2edbc41eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.039645} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.380861] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.381142] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 82ea280a-4e1b-4fac-a634-7f79ce731564/82ea280a-4e1b-4fac-a634-7f79ce731564.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1297.381556] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a4b12c5-f7db-4e80-a0c6-7555a0bee354 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.391111] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1297.391111] env[62525]: value = "task-1780757" [ 1297.391111] env[62525]: _type = "Task" [ 1297.391111] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.408369] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780757, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.588307] env[62525]: DEBUG oslo_concurrency.lockutils [req-929dda2f-2abb-4324-bc23-91e5d4cdf894 req-89d2eb06-2ec5-4b70-8f8a-13897180355c service nova] Releasing lock "refresh_cache-82ea280a-4e1b-4fac-a634-7f79ce731564" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.613759] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.678182] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780754, 'name': Rename_Task, 'duration_secs': 1.212807} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.678628] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1297.678842] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20b88aa8-0c76-48a0-835e-f02052a866fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.690520] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1297.690520] env[62525]: value = "task-1780758" [ 1297.690520] env[62525]: _type = "Task" [ 1297.690520] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.704199] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.848836] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780755, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.519712} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.849180] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6/0e8254af-403d-4f5d-ac58-f3b4efc0c3d6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1297.849433] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1297.849773] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f594d24-429e-4938-8505-5d4663a97d38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.857042] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1297.857042] env[62525]: value = "task-1780759" [ 1297.857042] env[62525]: _type = "Task" [ 1297.857042] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.864478] env[62525]: DEBUG nova.compute.utils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1297.867026] env[62525]: DEBUG nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1297.867026] env[62525]: DEBUG nova.network.neutron [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1297.875622] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780759, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.908728] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780757, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.979482] env[62525]: DEBUG nova.policy [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd21a8e1379ed4017992ff1f8befa90b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cae5d0f44332499ab2dbd7a69fc0aff2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1298.117197] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.208319] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780758, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.375065] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780759, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09708} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.375065] env[62525]: DEBUG nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1298.376774] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1298.378571] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da98b58e-53cb-44f2-a4c7-1974019c6145 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.410899] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6/0e8254af-403d-4f5d-ac58-f3b4efc0c3d6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1298.418849] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8da3a190-570b-4c6b-a43c-fbc7f8aebcab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.442969] env[62525]: DEBUG nova.network.neutron [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Successfully updated port: 44ff1acd-1593-43a1-95fd-aceba913d7d5 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1298.452173] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.882839} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.455050] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 82ea280a-4e1b-4fac-a634-7f79ce731564/82ea280a-4e1b-4fac-a634-7f79ce731564.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1298.455050] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1298.455050] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1298.455050] env[62525]: value = "task-1780760" [ 1298.455050] env[62525]: _type = "Task" [ 1298.455050] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.455050] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54ddb869-8505-4834-9b5a-32a9554bf8a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.469907] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1298.469907] env[62525]: value = "task-1780761" [ 1298.469907] env[62525]: _type = "Task" [ 1298.469907] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.474080] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.484555] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780761, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.614215] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.663598] env[62525]: DEBUG nova.compute.manager [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Received event network-changed-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1298.663790] env[62525]: DEBUG nova.compute.manager [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Refreshing instance network info cache due to event network-changed-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1298.664059] env[62525]: DEBUG oslo_concurrency.lockutils [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] Acquiring lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1298.664184] env[62525]: DEBUG oslo_concurrency.lockutils [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] Acquired lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.664610] env[62525]: DEBUG nova.network.neutron [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Refreshing network info cache for port bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1298.711910] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780758, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.819379] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e8c140-f618-4c04-a004-34919584aed3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.829821] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d2225e-3b26-4bb9-bf46-146fe868f0bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.864695] env[62525]: DEBUG nova.network.neutron [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Successfully created port: 682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1298.867931] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1270b00e-91e0-421e-9064-c0280a6702b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.875617] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d76ad1-a8f8-4929-a8fd-095c209df93d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.902482] env[62525]: DEBUG nova.compute.provider_tree [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.945735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "refresh_cache-56cb0d0c-a7dd-4158-8bed-ddff050e0226" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1298.945946] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "refresh_cache-56cb0d0c-a7dd-4158-8bed-ddff050e0226" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.946103] env[62525]: DEBUG nova.network.neutron [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1298.976650] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780760, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.986131] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105587} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.986420] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1298.987311] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bece16c-eba7-4010-8470-7918509cb515 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.011989] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 82ea280a-4e1b-4fac-a634-7f79ce731564/82ea280a-4e1b-4fac-a634-7f79ce731564.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.011989] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fad25cbb-aaa9-4762-80be-4c59f895b731 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.032832] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1299.032832] env[62525]: value = "task-1780763" [ 1299.032832] env[62525]: _type = "Task" [ 1299.032832] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.043060] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780763, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.120639] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.208030] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780758, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.387621] env[62525]: DEBUG nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1299.405960] env[62525]: DEBUG nova.scheduler.client.report [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1299.441697] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T00:09:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='244876146',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-97174310',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1299.441697] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1299.441697] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1299.441873] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1299.441918] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 
tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1299.443374] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1299.444029] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1299.444211] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1299.444380] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1299.444685] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1299.444767] env[62525]: DEBUG nova.virt.hardware [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1299.446073] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee55b035-a006-46f2-a2f4-26cbf2fd2512 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.459073] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3306f05-9af4-436a-8ec1-a24bcc5b9f4a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.487411] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780760, 'name': ReconfigVM_Task, 'duration_secs': 0.693481} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.490508] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6/0e8254af-403d-4f5d-ac58-f3b4efc0c3d6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1299.490508] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-524d15b6-aa2a-495e-b226-f201a2f9fbed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.496205] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1299.496205] env[62525]: value = "task-1780764" [ 1299.496205] env[62525]: _type = "Task" [ 1299.496205] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.512940] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780764, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.542454] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780763, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.554491] env[62525]: DEBUG nova.network.neutron [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1299.617574] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task} progress is 68%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.718348] env[62525]: DEBUG oslo_vmware.api [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780758, 'name': PowerOnVM_Task, 'duration_secs': 1.774559} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.719053] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1299.719360] env[62525]: INFO nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Took 13.93 seconds to spawn the instance on the hypervisor. [ 1299.719561] env[62525]: DEBUG nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1299.720438] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e1cd76-0b19-44b6-8f80-327fd507438f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.854755] env[62525]: DEBUG nova.network.neutron [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Updated VIF entry in instance network info cache for port bb0ccd32-fa3c-4e68-98dc-c81a3f541a88. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1299.859088] env[62525]: DEBUG nova.network.neutron [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Updating instance_info_cache with network_info: [{"id": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "address": "fa:16:3e:73:35:0d", "network": {"id": "6d708e5d-ee5e-4669-a392-953aae2a8af9", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1717084220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d45bea689ee84b988655da8fff7c4546", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb0ccd32-fa", "ovs_interfaceid": "bb0ccd32-fa3c-4e68-98dc-c81a3f541a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.914603] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 
tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.915228] env[62525]: DEBUG nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1299.922459] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.790s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.922459] env[62525]: INFO nova.compute.claims [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1299.984600] env[62525]: DEBUG nova.network.neutron [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Updating instance_info_cache with network_info: [{"id": "44ff1acd-1593-43a1-95fd-aceba913d7d5", "address": "fa:16:3e:26:64:8d", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44ff1acd-15", "ovs_interfaceid": "44ff1acd-1593-43a1-95fd-aceba913d7d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.009839] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780764, 'name': Rename_Task, 'duration_secs': 0.26277} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.009839] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.009839] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44eaa082-a74d-4ad3-b257-ece61802ca5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.019334] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1300.019334] env[62525]: value = "task-1780765" [ 1300.019334] env[62525]: _type = "Task" [ 1300.019334] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.027920] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780765, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.044216] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780763, 'name': ReconfigVM_Task, 'duration_secs': 0.799725} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.044216] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 82ea280a-4e1b-4fac-a634-7f79ce731564/82ea280a-4e1b-4fac-a634-7f79ce731564.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1300.044216] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1f5a8aa-522c-42f0-a489-4b17eb40d28c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.050040] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1300.050040] env[62525]: value = "task-1780766" [ 1300.050040] env[62525]: _type = "Task" [ 1300.050040] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.059217] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780766, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.117739] env[62525]: DEBUG oslo_vmware.api [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780756, 'name': PowerOnVM_Task, 'duration_secs': 3.233652} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.118040] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1300.118379] env[62525]: INFO nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Took 16.65 seconds to spawn the instance on the hypervisor. [ 1300.118453] env[62525]: DEBUG nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1300.119215] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a56573d-c363-4a54-9a7f-8eb5cf3a8692 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.250235] env[62525]: INFO nova.compute.manager [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Took 20.41 seconds to build instance. 
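The "Lock ... acquired/released ... held N.NNNs" records above come from oslo_concurrency's lock wrapper around named locks such as "compute_resources". A minimal, illustrative sketch of the pattern that produces them; instance_claim here is a stand-in for the decorated function, not Nova's ResourceTracker:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Only one caller at a time runs under the 'compute_resources' lock;
        # the wrapper logs how long it waited for the lock and how long it held it.
        ...

    # The same named lock is also available as a context manager, which emits the
    # "Acquiring/Acquired/Releasing lock" records seen for the refresh_cache locks:
    with lockutils.lock('compute_resources'):
        pass  # critical section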
[ 1300.363031] env[62525]: DEBUG oslo_concurrency.lockutils [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] Releasing lock "refresh_cache-deef59c8-f710-434d-bddc-f63bb3d518b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.364627] env[62525]: DEBUG nova.compute.manager [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Received event network-vif-plugged-44ff1acd-1593-43a1-95fd-aceba913d7d5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1300.364627] env[62525]: DEBUG oslo_concurrency.lockutils [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] Acquiring lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.364627] env[62525]: DEBUG oslo_concurrency.lockutils [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.364627] env[62525]: DEBUG oslo_concurrency.lockutils [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.364627] env[62525]: DEBUG nova.compute.manager [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] No waiting events found dispatching network-vif-plugged-44ff1acd-1593-43a1-95fd-aceba913d7d5 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1300.364995] env[62525]: WARNING nova.compute.manager [req-8e1de322-1acb-4385-9ed6-b26a53df08c0 req-9aa80614-eeb4-4675-a2f2-f8798186eaf6 service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Received unexpected event network-vif-plugged-44ff1acd-1593-43a1-95fd-aceba913d7d5 for instance with vm_state building and task_state spawning. [ 1300.427030] env[62525]: DEBUG nova.compute.utils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1300.432894] env[62525]: DEBUG nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1300.432894] env[62525]: DEBUG nova.network.neutron [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1300.486745] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "refresh_cache-56cb0d0c-a7dd-4158-8bed-ddff050e0226" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.487213] env[62525]: DEBUG nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance network_info: |[{"id": "44ff1acd-1593-43a1-95fd-aceba913d7d5", "address": "fa:16:3e:26:64:8d", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44ff1acd-15", "ovs_interfaceid": "44ff1acd-1593-43a1-95fd-aceba913d7d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1300.488583] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:64:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44ff1acd-1593-43a1-95fd-aceba913d7d5', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1300.499912] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating folder: Project (4685480cae574a5daac6a1f077a8c319). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1300.500238] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6af4270d-73a0-4338-9174-fc0d0dd361ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.513762] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created folder: Project (4685480cae574a5daac6a1f077a8c319) in parent group-v369553. [ 1300.513762] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating folder: Instances. Parent ref: group-v369586. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1300.513762] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c64076d-3c1f-4a28-b5da-5cca5620b373 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.526901] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created folder: Instances in parent group-v369586. [ 1300.527323] env[62525]: DEBUG oslo.service.loopingcall [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1300.528332] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1300.528332] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f67ff4bf-6082-42d3-a968-93cbe5ebdd2f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.548513] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780765, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.555519] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1300.555519] env[62525]: value = "task-1780769" [ 1300.555519] env[62525]: _type = "Task" [ 1300.555519] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.562854] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780766, 'name': Rename_Task, 'duration_secs': 0.440279} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.563555] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.564087] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53687230-7213-46ee-99be-9d846f0fe11e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.569284] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780769, 'name': CreateVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.576305] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1300.576305] env[62525]: value = "task-1780770" [ 1300.576305] env[62525]: _type = "Task" [ 1300.576305] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.589063] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.640825] env[62525]: INFO nova.compute.manager [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Took 22.74 seconds to build instance. 
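The Rename_Task / PowerOnVM_Task / CreateVM_Task records above all follow the same oslo.vmware pattern: invoke a vCenter task method, then poll the returned task until it completes (the "progress is N%" and "completed successfully" lines). A small sketch of that pattern, assuming an already-created VMwareAPISession and a vm_ref managed-object reference; power_on is illustrative, not Nova's vm_util.power_on_instance:

    from oslo_vmware import api as vmware_api

    def power_on(session: vmware_api.VMwareAPISession, vm_ref):
        """Start the VM and block until vCenter reports the task finished."""
        # invoke_api issues the SOAP call (the "Invoking VirtualMachine.PowerOnVM_Task"
        # lines) and returns a task reference such as task-1780765.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task state, producing the progress records,
        # and raises if the task ends in an error state.
        return session.wait_for_task(task)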
[ 1300.662191] env[62525]: DEBUG nova.policy [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b2a3e9006c44ebabc5a73be540b9045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4685480cae574a5daac6a1f077a8c319', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1300.753415] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b9fe2cae-2ff4-40d1-a2df-a0a83b0cb51a tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.922s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.934521] env[62525]: DEBUG nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1301.035966] env[62525]: DEBUG oslo_vmware.api [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780765, 'name': PowerOnVM_Task, 'duration_secs': 0.953401} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.036313] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.036516] env[62525]: INFO nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Took 12.91 seconds to spawn the instance on the hypervisor. [ 1301.036692] env[62525]: DEBUG nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1301.037572] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3192edf9-19e7-4096-a4d0-338f1503784a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.072762] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780769, 'name': CreateVM_Task, 'duration_secs': 0.427807} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.076449] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1301.078515] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.078515] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.078624] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1301.084032] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a9c7eb-4e86-482c-95fe-bae5dffc301f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.093833] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780770, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.097885] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1301.097885] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52976842-346f-2dc0-c792-3ff95ec66ec9" [ 1301.097885] env[62525]: _type = "Task" [ 1301.097885] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.111117] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52976842-346f-2dc0-c792-3ff95ec66ec9, 'name': SearchDatastore_Task, 'duration_secs': 0.015697} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.111447] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.111687] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1301.111933] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.112095] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.112272] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1301.113232] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-422126d1-7e83-4ccf-87b3-170520208c81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.124286] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1301.124472] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1301.125257] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69fea525-7da2-4f85-aaac-31b23324bce0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.130747] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1301.130747] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5213c41f-064e-8e13-eac0-5463c1c41399" [ 1301.130747] env[62525]: _type = "Task" [ 1301.130747] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.138772] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5213c41f-064e-8e13-eac0-5463c1c41399, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.146486] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65415049-eeaa-4dec-b817-d178d8aee8e7 tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.254s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.260030] env[62525]: DEBUG nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1301.350623] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc421eec-05e8-4e12-a343-85f4e41123df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.360566] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a31966c-3250-46ab-943f-ddc5653ac86a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.394950] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88df7ac1-8141-4412-90c6-fee8d76b765e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.404691] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4518cfcf-8362-412b-bc34-9bacb61b0547 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.424639] env[62525]: DEBUG nova.compute.provider_tree [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.573040] env[62525]: INFO nova.compute.manager [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Took 19.65 seconds to build instance. [ 1301.588938] env[62525]: DEBUG oslo_vmware.api [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780770, 'name': PowerOnVM_Task, 'duration_secs': 0.682786} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.588938] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.589084] env[62525]: INFO nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Took 10.11 seconds to spawn the instance on the hypervisor. 
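The network_info blobs logged above are JSON-compatible lists of VIF dicts (port id, MAC, subnets, fixed and floating IPs). Purely to illustrate that shape, the snippet below pulls the addresses out of a trimmed-down copy of the 44ff1acd-... entry; it is not code from Nova or Neutron:

    import json

    network_info_json = '''
    [{"id": "44ff1acd-1593-43a1-95fd-aceba913d7d5",
      "address": "fa:16:3e:26:64:8d",
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.5",
                                        "floating_ips": []}]}]}}]
    '''

    for vif in json.loads(network_info_json):
        # Fixed IPs live under network.subnets[].ips[]; floating IPs hang off
        # each fixed IP entry (empty here, cf. 10.180.180.194 in the earlier entry).
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
        floating = [fip["address"]
                    for subnet in vif["network"]["subnets"]
                    for ip in subnet["ips"]
                    for fip in ip.get("floating_ips", [])]
        print(vif["id"], "fixed:", fixed, "floating:", floating)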
[ 1301.589266] env[62525]: DEBUG nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1301.590241] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34578a14-adfe-4feb-be5f-5fdcb2ff5212 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.647015] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5213c41f-064e-8e13-eac0-5463c1c41399, 'name': SearchDatastore_Task, 'duration_secs': 0.031254} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.647739] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eeacdb5-adf5-4673-8190-d531009b65f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.654755] env[62525]: DEBUG nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1301.661867] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1301.661867] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5259dfda-3c2e-bf2a-bc97-f42e0e51fdd7" [ 1301.661867] env[62525]: _type = "Task" [ 1301.661867] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.670622] env[62525]: DEBUG nova.network.neutron [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Successfully created port: a0cbf762-a06a-49a1-8925-b6235d3c0380 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.687639] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5259dfda-3c2e-bf2a-bc97-f42e0e51fdd7, 'name': SearchDatastore_Task, 'duration_secs': 0.011919} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.687639] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.687639] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1301.687639] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53f6f479-dd0d-4514-978a-b24042c18f13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.697441] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1301.697441] env[62525]: value = "task-1780772" [ 1301.697441] env[62525]: _type = "Task" [ 1301.697441] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.712645] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.808312] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.838566] env[62525]: DEBUG nova.compute.manager [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Received event network-changed-44ff1acd-1593-43a1-95fd-aceba913d7d5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1301.838710] env[62525]: DEBUG nova.compute.manager [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Refreshing instance network info cache due to event network-changed-44ff1acd-1593-43a1-95fd-aceba913d7d5. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1301.838935] env[62525]: DEBUG oslo_concurrency.lockutils [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] Acquiring lock "refresh_cache-56cb0d0c-a7dd-4158-8bed-ddff050e0226" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.839085] env[62525]: DEBUG oslo_concurrency.lockutils [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] Acquired lock "refresh_cache-56cb0d0c-a7dd-4158-8bed-ddff050e0226" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.839240] env[62525]: DEBUG nova.network.neutron [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Refreshing network info cache for port 44ff1acd-1593-43a1-95fd-aceba913d7d5 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1301.927287] env[62525]: DEBUG nova.scheduler.client.report [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1301.950344] env[62525]: DEBUG nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1301.993610] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1301.993914] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1301.994074] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.994276] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1301.994423] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.994569] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1301.994977] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1301.994977] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1301.996823] env[62525]: DEBUG nova.virt.hardware [None 
req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1301.997085] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1301.997305] env[62525]: DEBUG nova.virt.hardware [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1301.998763] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec711dbe-6f2f-43e2-bc9f-d713a31fb0ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.011664] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c5dc79-a630-425c-827d-e82d3596b5d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.067474] env[62525]: DEBUG nova.network.neutron [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Successfully updated port: 682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1302.077220] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c2f097e4-a57a-48a8-ab98-4c67c487fdaf tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.167s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.114815] env[62525]: INFO nova.compute.manager [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Took 18.17 seconds to build instance. [ 1302.185070] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.208642] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780772, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.436670] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.437320] env[62525]: DEBUG nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1302.439948] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.855s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.440144] env[62525]: DEBUG nova.objects.instance [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lazy-loading 'resources' on Instance uuid 5bffec39-0b09-49a0-a862-560720db45cd {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.570121] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.571657] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.571657] env[62525]: DEBUG nova.network.neutron [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1302.582137] env[62525]: DEBUG nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1302.619945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c90b698-a771-457a-89c2-d8b021308a02 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.686s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.717515] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715868} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.717515] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1302.717515] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1302.717515] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e24312ca-0f40-44f5-8b34-25c4a1aa4848 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.734382] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1302.734382] env[62525]: value = "task-1780774" [ 1302.734382] env[62525]: _type = "Task" [ 1302.734382] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.747120] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780774, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.756561] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.756846] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.814201] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "e3255df2-2de0-4668-ad7b-a864ea680b44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.814385] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.928043] env[62525]: DEBUG nova.compute.manager [req-4260407c-82e4-4e85-b79f-17c62bfaac5b req-61900f1a-a089-408b-8222-7d267fe1e924 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Received event network-vif-plugged-682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1302.928452] env[62525]: DEBUG oslo_concurrency.lockutils [req-4260407c-82e4-4e85-b79f-17c62bfaac5b req-61900f1a-a089-408b-8222-7d267fe1e924 service nova] Acquiring lock "aa639aa3-d21c-4923-bc39-56e648c566fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.928534] env[62525]: DEBUG oslo_concurrency.lockutils [req-4260407c-82e4-4e85-b79f-17c62bfaac5b req-61900f1a-a089-408b-8222-7d267fe1e924 service nova] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.928728] env[62525]: DEBUG oslo_concurrency.lockutils [req-4260407c-82e4-4e85-b79f-17c62bfaac5b req-61900f1a-a089-408b-8222-7d267fe1e924 service nova] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.928798] env[62525]: DEBUG nova.compute.manager [req-4260407c-82e4-4e85-b79f-17c62bfaac5b req-61900f1a-a089-408b-8222-7d267fe1e924 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] No waiting events found dispatching network-vif-plugged-682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1302.928955] env[62525]: WARNING nova.compute.manager [req-4260407c-82e4-4e85-b79f-17c62bfaac5b req-61900f1a-a089-408b-8222-7d267fe1e924 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Received unexpected event network-vif-plugged-682b8bd4-d21c-41b2-a9ed-2eae30b329e0 for instance with vm_state building and task_state spawning. [ 1302.947054] env[62525]: DEBUG nova.compute.utils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1302.956389] env[62525]: DEBUG nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Not allocating networking since 'none' was specified. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1303.114812] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.122475] env[62525]: DEBUG nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1303.155219] env[62525]: DEBUG nova.network.neutron [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1303.249018] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780774, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080858} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.252400] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1303.254413] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f170c00-e4c7-482d-b230-5865a7490e4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.286927] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1303.290078] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a38a0e3e-f433-4bba-96ac-3768883201e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.313460] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1303.313460] env[62525]: value = "task-1780775" [ 1303.313460] env[62525]: _type = "Task" [ 1303.313460] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.327256] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780775, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.409450] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e945b21-55bd-4f94-acd4-a2c28cd2e28a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.422173] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59bbac1-3585-4baa-8524-1aae46cd90f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.459763] env[62525]: DEBUG nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1303.464027] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ae2b8e-5aa6-45c6-a4b9-f2a1c55b9e88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.474710] env[62525]: DEBUG nova.network.neutron [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Updating instance_info_cache with network_info: [{"id": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "address": "fa:16:3e:d1:d7:37", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap682b8bd4-d2", "ovs_interfaceid": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.477367] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e714d7-2e81-483f-bac4-12d75e1fede2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.496396] env[62525]: DEBUG nova.compute.provider_tree [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.511291] env[62525]: DEBUG nova.network.neutron [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Updated VIF entry in instance network info cache for port 44ff1acd-1593-43a1-95fd-aceba913d7d5. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1303.511617] env[62525]: DEBUG nova.network.neutron [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Updating instance_info_cache with network_info: [{"id": "44ff1acd-1593-43a1-95fd-aceba913d7d5", "address": "fa:16:3e:26:64:8d", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44ff1acd-15", "ovs_interfaceid": "44ff1acd-1593-43a1-95fd-aceba913d7d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.656026] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.834895] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780775, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.984374] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Releasing lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.984374] env[62525]: DEBUG nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Instance network_info: |[{"id": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "address": "fa:16:3e:d1:d7:37", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap682b8bd4-d2", "ovs_interfaceid": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1303.984564] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:d7:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '682b8bd4-d21c-41b2-a9ed-2eae30b329e0', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1304.002205] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Creating folder: Project (cae5d0f44332499ab2dbd7a69fc0aff2). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.003232] env[62525]: DEBUG nova.scheduler.client.report [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1304.007412] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9fb2729-3116-495d-9d92-1feed87401ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.017673] env[62525]: DEBUG oslo_concurrency.lockutils [req-9033ade2-0436-4c8c-80e3-44b790e5ef9e req-d9e2d127-16a5-427e-a306-b47268cf0b0c service nova] Releasing lock "refresh_cache-56cb0d0c-a7dd-4158-8bed-ddff050e0226" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.027400] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Created folder: Project (cae5d0f44332499ab2dbd7a69fc0aff2) in parent group-v369553. [ 1304.027614] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Creating folder: Instances. Parent ref: group-v369589. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.028235] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55049ad4-bf4f-4a5d-8bf9-589f41bd30e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.046040] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Created folder: Instances in parent group-v369589. [ 1304.046367] env[62525]: DEBUG oslo.service.loopingcall [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1304.046367] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1304.046858] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9887226c-f8b0-4908-9c70-6ef5a51544cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.077420] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1304.077420] env[62525]: value = "task-1780778" [ 1304.077420] env[62525]: _type = "Task" [ 1304.077420] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.087035] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780778, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.328923] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780775, 'name': ReconfigVM_Task, 'duration_secs': 0.639227} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.328923] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1304.329334] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f2f0834-d81a-410c-b096-6889f65fac0f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.337840] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1304.337840] env[62525]: value = "task-1780779" [ 1304.337840] env[62525]: _type = "Task" [ 1304.337840] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.348541] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780779, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.471238] env[62525]: DEBUG nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1304.503334] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1304.503598] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1304.503783] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.503978] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1304.504137] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.504338] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1304.504563] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1304.504823] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1304.505483] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 
tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1304.505701] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1304.505883] env[62525]: DEBUG nova.virt.hardware [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1304.506782] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a863c4-2855-4fdd-a190-f8c911ed08f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.516862] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.076s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.522976] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.452s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.524273] env[62525]: INFO nova.compute.claims [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1304.531186] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc01bbd-76c7-4ec7-a7f9-c0d73de22747 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.537782] env[62525]: DEBUG nova.network.neutron [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Successfully updated port: a0cbf762-a06a-49a1-8925-b6235d3c0380 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.540118] env[62525]: INFO nova.scheduler.client.report [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Deleted allocations for instance 5bffec39-0b09-49a0-a862-560720db45cd [ 1304.560325] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 
7f8392fa-1c11-4180-bda9-057b5cfa058c] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1304.566430] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Creating folder: Project (dbb449c4d9a643598262907eba359cfd). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.566893] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc80003a-e311-4d60-a0bd-c9e5713b778d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.590099] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780778, 'name': CreateVM_Task, 'duration_secs': 0.428026} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.590932] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1304.592415] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.592573] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.593026] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1304.593203] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Created folder: Project (dbb449c4d9a643598262907eba359cfd) in parent group-v369553. [ 1304.593371] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Creating folder: Instances. Parent ref: group-v369592. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.593612] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e540f829-1e7d-4751-a8b4-2d31ea258cf3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.597807] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd665ac5-455e-44d6-a4f5-956b270944a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.602277] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1304.602277] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520b86a1-75ef-eae5-a802-3afb6a57fa6d" [ 1304.602277] env[62525]: _type = "Task" [ 1304.602277] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.609341] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Created folder: Instances in parent group-v369592. [ 1304.609341] env[62525]: DEBUG oslo.service.loopingcall [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1304.609341] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1304.609341] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a36fcb3-d29e-4590-a9dc-1a1d12700534 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.626290] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520b86a1-75ef-eae5-a802-3afb6a57fa6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.633230] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1304.633230] env[62525]: value = "task-1780782" [ 1304.633230] env[62525]: _type = "Task" [ 1304.633230] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.644793] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780782, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.853172] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780779, 'name': Rename_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.901671] env[62525]: DEBUG nova.compute.manager [req-909fe98e-340e-414f-9e79-f5d514789a4f req-095da2c4-5fcd-4125-b2ff-db3b91bc8eef service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Received event network-vif-plugged-a0cbf762-a06a-49a1-8925-b6235d3c0380 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1304.901889] env[62525]: DEBUG oslo_concurrency.lockutils [req-909fe98e-340e-414f-9e79-f5d514789a4f req-095da2c4-5fcd-4125-b2ff-db3b91bc8eef service nova] Acquiring lock "f93669f2-c59d-4f3f-85a2-a60d714326ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.902108] env[62525]: DEBUG oslo_concurrency.lockutils [req-909fe98e-340e-414f-9e79-f5d514789a4f req-095da2c4-5fcd-4125-b2ff-db3b91bc8eef service nova] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.902315] env[62525]: DEBUG oslo_concurrency.lockutils [req-909fe98e-340e-414f-9e79-f5d514789a4f req-095da2c4-5fcd-4125-b2ff-db3b91bc8eef service nova] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.906349] env[62525]: DEBUG nova.compute.manager [req-909fe98e-340e-414f-9e79-f5d514789a4f req-095da2c4-5fcd-4125-b2ff-db3b91bc8eef service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] No waiting events found dispatching network-vif-plugged-a0cbf762-a06a-49a1-8925-b6235d3c0380 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1304.906349] env[62525]: WARNING nova.compute.manager [req-909fe98e-340e-414f-9e79-f5d514789a4f req-095da2c4-5fcd-4125-b2ff-db3b91bc8eef service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Received unexpected event network-vif-plugged-a0cbf762-a06a-49a1-8925-b6235d3c0380 for instance with vm_state building and task_state spawning. 
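The entries above show the external-event pattern that recurs throughout this log: Neutron reports network-vif-plugged for a port, nova-compute takes the per-instance "<uuid>-events" lock, looks for a registered waiter for that event, and, finding none while the instance is still in vm_state building / task_state spawning, logs the event as unexpected. The following is a minimal sketch of that dispatch pattern only, not Nova's actual implementation; InstanceEventRegistry, register_waiter and dispatch are hypothetical names, and a plain threading.Lock stands in for the oslo.concurrency "<uuid>-events" lock seen in the log.

    # Simplified analogue of the "pop waiting event under a per-instance lock,
    # warn if nothing is waiting" behaviour visible in the log above.
    import threading

    class InstanceEventRegistry:
        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> callback

        def register_waiter(self, instance_uuid, event_name, callback):
            # A spawning instance registers interest in e.g. "network-vif-plugged-<port>".
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = callback

        def dispatch(self, instance_uuid, event_name):
            # An external event arrives; pop the waiter if one exists.
            with self._lock:
                callback = self._waiters.pop((instance_uuid, event_name), None)
            if callback is None:
                # Mirrors "No waiting events found dispatching ..." followed by the
                # WARNING about an unexpected event for a building/spawning instance.
                print(f"unexpected event {event_name} for instance {instance_uuid}")
                return False
            callback()
            return True

In the log this is usually benign: the port was plugged before the spawn path got around to waiting for the event, so the notification simply has no consumer yet.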
[ 1305.052151] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3684955b-94b5-4ddf-a0bb-65914f174e1c tempest-ServerDiagnosticsNegativeTest-1839105827 tempest-ServerDiagnosticsNegativeTest-1839105827-project-member] Lock "5bffec39-0b09-49a0-a862-560720db45cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.657s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.055561] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "refresh_cache-f93669f2-c59d-4f3f-85a2-a60d714326ac" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.055561] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "refresh_cache-f93669f2-c59d-4f3f-85a2-a60d714326ac" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.055561] env[62525]: DEBUG nova.network.neutron [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.116222] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520b86a1-75ef-eae5-a802-3afb6a57fa6d, 'name': SearchDatastore_Task, 'duration_secs': 0.018839} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.117042] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.117430] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1305.117822] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.118108] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.118413] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1305.118824] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-680ba409-582c-47cd-9f48-1657be6e033e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.132022] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1305.132022] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1305.132022] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd225304-baee-48d0-b6f8-d9470f090f54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.149175] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780782, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.152288] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1305.152288] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c761f9-640c-9f8d-fecb-645648de925a" [ 1305.152288] env[62525]: _type = "Task" [ 1305.152288] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.165912] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c761f9-640c-9f8d-fecb-645648de925a, 'name': SearchDatastore_Task, 'duration_secs': 0.01438} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.168168] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f48be29c-d58a-4fdb-98a3-046704497ea3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.177257] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1305.177257] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ed924e-1454-280a-c4c7-7c4c30b1d582" [ 1305.177257] env[62525]: _type = "Task" [ 1305.177257] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.190028] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ed924e-1454-280a-c4c7-7c4c30b1d582, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.356834] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780779, 'name': Rename_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.654296] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780782, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.681380] env[62525]: DEBUG nova.network.neutron [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1305.698224] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ed924e-1454-280a-c4c7-7c4c30b1d582, 'name': SearchDatastore_Task, 'duration_secs': 0.015112} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.701511] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.702538] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] aa639aa3-d21c-4923-bc39-56e648c566fb/aa639aa3-d21c-4923-bc39-56e648c566fb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1305.702538] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56f79b9f-ffae-4f34-a795-f72f6096d5ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.711877] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1305.711877] env[62525]: value = "task-1780784" [ 1305.711877] env[62525]: _type = "Task" [ 1305.711877] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.728021] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.859802] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780779, 'name': Rename_Task, 'duration_secs': 1.277034} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.866167] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1305.866167] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06038041-b7cb-46e1-9dd1-2313486457a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.872993] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1305.872993] env[62525]: value = "task-1780785" [ 1305.872993] env[62525]: _type = "Task" [ 1305.872993] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.888790] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780785, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.011489] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b96432-7587-47ef-84f8-5b1bc9e71add {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.022298] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0dfe08-a704-4d87-ae11-d2b413521c70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.062402] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9072b746-d7ef-43b6-88fc-3a7b7f0f08ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.073551] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c93cd62-4502-4538-b814-f485d401a7b5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.092831] env[62525]: DEBUG nova.compute.provider_tree [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1306.153749] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780782, 'name': CreateVM_Task, 'duration_secs': 1.229333} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.154746] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.157077] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.157077] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.157077] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1306.157077] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-611136c9-5802-4edd-b344-4c6cc56054af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.166684] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1306.166684] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c7c345-c689-541a-069f-9622c95f6d4c" [ 1306.166684] env[62525]: _type = "Task" [ 1306.166684] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.178142] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c7c345-c689-541a-069f-9622c95f6d4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.228513] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780784, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.295688] env[62525]: DEBUG nova.network.neutron [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Updating instance_info_cache with network_info: [{"id": "a0cbf762-a06a-49a1-8925-b6235d3c0380", "address": "fa:16:3e:93:cc:8f", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0cbf762-a0", "ovs_interfaceid": "a0cbf762-a06a-49a1-8925-b6235d3c0380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.329806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "84fbb408-7810-4166-a53e-242d51f60322" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.330139] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.384404] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780785, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.601291] env[62525]: DEBUG nova.scheduler.client.report [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1306.688259] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c7c345-c689-541a-069f-9622c95f6d4c, 'name': SearchDatastore_Task, 'duration_secs': 0.087367} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.688739] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.689202] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1306.689524] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.689813] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.690063] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1306.690704] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d8fcccb-6d2c-4f25-99ea-a8e17056faea {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.705518] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1306.705749] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1306.706649] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-134958d7-70a7-4d7c-8899-9f6a3176ef8c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.713180] env[62525]: DEBUG nova.compute.manager [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Received event network-changed-682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1306.713409] env[62525]: DEBUG nova.compute.manager [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Refreshing instance network info cache due to event network-changed-682b8bd4-d21c-41b2-a9ed-2eae30b329e0. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1306.713493] env[62525]: DEBUG oslo_concurrency.lockutils [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] Acquiring lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.713619] env[62525]: DEBUG oslo_concurrency.lockutils [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] Acquired lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.713774] env[62525]: DEBUG nova.network.neutron [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Refreshing network info cache for port 682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1306.724022] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1306.724022] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52262f19-5d5f-b16f-f824-c2d8f915cc63" [ 1306.724022] env[62525]: _type = "Task" [ 1306.724022] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.731808] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780784, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.745494] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52262f19-5d5f-b16f-f824-c2d8f915cc63, 'name': SearchDatastore_Task, 'duration_secs': 0.01687} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.748868] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31dcf86-635c-48c8-b156-10098dd6669a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.761399] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1306.761399] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52debb2d-7cdf-fe16-df2f-f7c2a67c9844" [ 1306.761399] env[62525]: _type = "Task" [ 1306.761399] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.775450] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52debb2d-7cdf-fe16-df2f-f7c2a67c9844, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.798328] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "refresh_cache-f93669f2-c59d-4f3f-85a2-a60d714326ac" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.803371] env[62525]: DEBUG nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Instance network_info: |[{"id": "a0cbf762-a06a-49a1-8925-b6235d3c0380", "address": "fa:16:3e:93:cc:8f", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0cbf762-a0", "ovs_interfaceid": "a0cbf762-a06a-49a1-8925-b6235d3c0380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1306.804451] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:cc:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0cbf762-a06a-49a1-8925-b6235d3c0380', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.816681] env[62525]: DEBUG oslo.service.loopingcall [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1306.817535] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1306.817784] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4ad1da0-56aa-49ed-9761-8d62d8e68c52 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.855425] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.855425] env[62525]: value = "task-1780786" [ 1306.855425] env[62525]: _type = "Task" [ 1306.855425] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.867861] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780786, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.889607] env[62525]: DEBUG oslo_vmware.api [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780785, 'name': PowerOnVM_Task, 'duration_secs': 0.999645} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.889607] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1306.889607] env[62525]: INFO nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Took 10.03 seconds to spawn the instance on the hypervisor. [ 1306.890022] env[62525]: DEBUG nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1306.892016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c53088-d23b-4209-a19f-7ede217a0457 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.104965] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.105533] env[62525]: DEBUG nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1307.110168] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.122s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.114281] env[62525]: INFO nova.compute.claims [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1307.237366] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780784, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.029865} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.238568] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] aa639aa3-d21c-4923-bc39-56e648c566fb/aa639aa3-d21c-4923-bc39-56e648c566fb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1307.241139] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1307.241139] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2772c5b8-0e12-479e-a5ca-98e6fa56a81b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.255613] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1307.255613] env[62525]: value = "task-1780788" [ 1307.255613] env[62525]: _type = "Task" [ 1307.255613] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.269535] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780788, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.276907] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52debb2d-7cdf-fe16-df2f-f7c2a67c9844, 'name': SearchDatastore_Task, 'duration_secs': 0.025126} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.276907] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.276907] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7f8392fa-1c11-4180-bda9-057b5cfa058c/7f8392fa-1c11-4180-bda9-057b5cfa058c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1307.278064] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eda032ce-5637-4b2c-8df3-01c9bda1c2a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.290249] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1307.290249] env[62525]: value = "task-1780789" [ 1307.290249] env[62525]: _type = "Task" [ 1307.290249] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.300325] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.370204] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780786, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.415394] env[62525]: INFO nova.compute.manager [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Took 19.41 seconds to build instance. 
[ 1307.490339] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.490560] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.620246] env[62525]: DEBUG nova.compute.utils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1307.624948] env[62525]: DEBUG nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Not allocating networking since 'none' was specified. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1307.771347] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780788, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077432} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.771726] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1307.775024] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf5e2b4-29f1-4dfb-9ab0-dde8c41576a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.806142] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] aa639aa3-d21c-4923-bc39-56e648c566fb/aa639aa3-d21c-4923-bc39-56e648c566fb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1307.810691] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03cd43ef-8ba6-4ed1-b2ff-72573a8eaeef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.833879] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780789, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.836269] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1307.836269] env[62525]: value = "task-1780790" [ 1307.836269] env[62525]: _type = "Task" [ 1307.836269] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.853326] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780790, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.869770] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780786, 'name': CreateVM_Task, 'duration_secs': 0.775854} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.869944] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1307.870718] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.870993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.871323] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1307.871584] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec324f71-abb1-49cc-8dd8-1343f6786e9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.878281] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1307.878281] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52789bf7-8381-ade2-6bf7-fbe6cace0d43" [ 1307.878281] env[62525]: _type = "Task" [ 1307.878281] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.889393] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52789bf7-8381-ade2-6bf7-fbe6cace0d43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.917717] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23231106-2bb3-4a7d-8130-d6efef156bd4 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.925s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1308.126404] env[62525]: DEBUG nova.network.neutron [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Updated VIF entry in instance network info cache for port 682b8bd4-d21c-41b2-a9ed-2eae30b329e0. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1308.126502] env[62525]: DEBUG nova.network.neutron [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Updating instance_info_cache with network_info: [{"id": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "address": "fa:16:3e:d1:d7:37", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap682b8bd4-d2", "ovs_interfaceid": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.133601] env[62525]: DEBUG nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1308.138324] env[62525]: DEBUG nova.compute.manager [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Received event network-changed-a0cbf762-a06a-49a1-8925-b6235d3c0380 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1308.142677] env[62525]: DEBUG nova.compute.manager [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Refreshing instance network info cache due to event network-changed-a0cbf762-a06a-49a1-8925-b6235d3c0380. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1308.142677] env[62525]: DEBUG oslo_concurrency.lockutils [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] Acquiring lock "refresh_cache-f93669f2-c59d-4f3f-85a2-a60d714326ac" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.142677] env[62525]: DEBUG oslo_concurrency.lockutils [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] Acquired lock "refresh_cache-f93669f2-c59d-4f3f-85a2-a60d714326ac" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.142677] env[62525]: DEBUG nova.network.neutron [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Refreshing network info cache for port a0cbf762-a06a-49a1-8925-b6235d3c0380 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1308.313716] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780789, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.983811} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.314034] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7f8392fa-1c11-4180-bda9-057b5cfa058c/7f8392fa-1c11-4180-bda9-057b5cfa058c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1308.314967] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1308.315239] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa6a04af-cc7c-4e6e-ab36-b7351c824ef9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.325852] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1308.325852] env[62525]: value = "task-1780791" [ 1308.325852] env[62525]: _type = "Task" [ 1308.325852] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.345776] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780791, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.356512] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780790, 'name': ReconfigVM_Task, 'duration_secs': 0.425465} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.357726] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Reconfigured VM instance instance-0000000d to attach disk [datastore1] aa639aa3-d21c-4923-bc39-56e648c566fb/aa639aa3-d21c-4923-bc39-56e648c566fb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1308.358213] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ae8d937-6783-40c9-bf84-cea4ada7ad56 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.369410] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1308.369410] env[62525]: value = "task-1780792" [ 1308.369410] env[62525]: _type = "Task" [ 1308.369410] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.383017] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780792, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.397300] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52789bf7-8381-ade2-6bf7-fbe6cace0d43, 'name': SearchDatastore_Task, 'duration_secs': 0.07861} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.397300] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.397300] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1308.397300] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.397636] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.397636] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1308.397636] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-781da2ef-69f5-474c-b1dc-101997d328c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.408093] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1308.408355] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1308.409155] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6ee7b3e-6905-49e0-bdff-a2991b4cad7e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.417549] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1308.417549] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5238fafa-3de8-9305-bcb9-b7507dbdb594" [ 1308.417549] env[62525]: _type = "Task" [ 1308.417549] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.421708] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1308.435288] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5238fafa-3de8-9305-bcb9-b7507dbdb594, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.630981] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0eefb8-0d22-49d1-ad47-02d518e14c6e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.640872] env[62525]: DEBUG oslo_concurrency.lockutils [req-9d46288d-e35d-415c-bdbb-14f158fb280b req-ea40be52-b0a8-4dc6-8ec8-cd54ec688388 service nova] Releasing lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.641433] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a019da6-97c7-41a2-b06a-51bf2dd98d3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.682898] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045512fb-bb64-44fd-9965-3ffd3f40c241 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.694074] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9582228-be9b-4da4-9abd-c40de7a6665d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.712741] env[62525]: DEBUG nova.compute.provider_tree [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1308.753406] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.753672] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.753897] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.754110] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.754291] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1308.757026] env[62525]: INFO nova.compute.manager [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Terminating instance [ 1308.759952] env[62525]: DEBUG nova.compute.manager [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1308.759952] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1308.760892] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74595a8-f153-4727-b124-259caf7770a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.771186] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1308.771248] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-139bae73-519c-44ef-b757-205e7ef1d190 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.781596] env[62525]: DEBUG oslo_vmware.api [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1308.781596] env[62525]: value = "task-1780793" [ 1308.781596] env[62525]: _type = "Task" [ 1308.781596] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.798577] env[62525]: DEBUG oslo_vmware.api [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.842184] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162945} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.842540] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1308.843384] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f827c51b-301f-444b-88e1-38aa90de68bd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.869274] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 7f8392fa-1c11-4180-bda9-057b5cfa058c/7f8392fa-1c11-4180-bda9-057b5cfa058c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.873841] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-226b6938-dc82-4d52-9e3e-e248d642cea1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.900634] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780792, 'name': Rename_Task, 'duration_secs': 0.219732} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.902789] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1308.903115] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1308.903115] env[62525]: value = "task-1780794" [ 1308.903115] env[62525]: _type = "Task" [ 1308.903115] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.903474] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1718d67f-6b1a-446e-93ec-27ed95c0060b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.914135] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1308.914135] env[62525]: value = "task-1780795" [ 1308.914135] env[62525]: _type = "Task" [ 1308.914135] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.917696] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780794, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.935052] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780795, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.940159] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5238fafa-3de8-9305-bcb9-b7507dbdb594, 'name': SearchDatastore_Task, 'duration_secs': 0.041483} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.940822] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-384fe002-e920-4ae8-b2b0-697d17456510 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.955543] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1308.955543] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52669b61-6061-27c4-a916-495b79835928" [ 1308.955543] env[62525]: _type = "Task" [ 1308.955543] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.968527] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52669b61-6061-27c4-a916-495b79835928, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.986659] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.153233] env[62525]: DEBUG nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1309.193149] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1309.194135] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1309.194135] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1309.194135] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1309.194135] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1309.194370] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1309.194431] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1309.195295] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1309.195295] 
env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1309.195295] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1309.195295] env[62525]: DEBUG nova.virt.hardware [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1309.196752] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d137a5f-bf96-471d-b7ad-9f7f7d0627f9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.207617] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68788a9e-22e5-465f-b195-fa72c3fd0990 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.236652] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1309.248458] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Creating folder: Project (4937a5af3a6b4d3798c5ed2322dfe2a6). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.248536] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56c89dfa-dfbd-4035-aede-3c3b124a648b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.258069] env[62525]: ERROR nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [req-41ef9253-729b-4e20-9826-1da49a5f7fb5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-41ef9253-729b-4e20-9826-1da49a5f7fb5"}]} [ 1309.265695] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Created folder: Project (4937a5af3a6b4d3798c5ed2322dfe2a6) in parent group-v369553. [ 1309.265906] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Creating folder: Instances. Parent ref: group-v369597. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.267174] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3e38fe9-4629-446c-9d4f-4caffc8a3e0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.287431] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Created folder: Instances in parent group-v369597. [ 1309.287431] env[62525]: DEBUG oslo.service.loopingcall [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.288147] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1309.294535] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1309.295369] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1e93bb8-ffd9-4e46-899c-68c0dc4dc199 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.328185] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1309.328342] env[62525]: DEBUG nova.compute.provider_tree [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d 
tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1309.331565] env[62525]: DEBUG oslo_vmware.api [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780793, 'name': PowerOffVM_Task, 'duration_secs': 0.332358} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.334097] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1309.334906] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1309.334906] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1309.334906] env[62525]: value = "task-1780799" [ 1309.334906] env[62525]: _type = "Task" [ 1309.334906] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.335283] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca52e520-b03e-4e30-a109-3ee2c9ce47b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.356858] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780799, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.362768] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: 1347c382-ea9f-478d-83fd-3edc1b98450e {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1309.382520] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1309.426417] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780794, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.437577] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780795, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.442998] env[62525]: DEBUG nova.network.neutron [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Updated VIF entry in instance network info cache for port a0cbf762-a06a-49a1-8925-b6235d3c0380. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1309.444171] env[62525]: DEBUG nova.network.neutron [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Updating instance_info_cache with network_info: [{"id": "a0cbf762-a06a-49a1-8925-b6235d3c0380", "address": "fa:16:3e:93:cc:8f", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0cbf762-a0", "ovs_interfaceid": "a0cbf762-a06a-49a1-8925-b6235d3c0380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.471314] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52669b61-6061-27c4-a916-495b79835928, 'name': SearchDatastore_Task, 'duration_secs': 0.017798} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.472371] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.473764] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f93669f2-c59d-4f3f-85a2-a60d714326ac/f93669f2-c59d-4f3f-85a2-a60d714326ac.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1309.473764] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-734759d4-e449-4350-925a-0576fca28338 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.486487] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1309.486487] env[62525]: value = "task-1780801" [ 1309.486487] env[62525]: _type = "Task" [ 1309.486487] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.497162] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780801, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.733048] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1309.733579] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1309.734057] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Deleting the datastore file [datastore1] 8c6e22d6-353f-4be5-8400-7fe38a9bee25 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1309.735616] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19512acb-819d-4b4b-a1e6-6199c730dbe3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.747887] env[62525]: DEBUG oslo_vmware.api [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for the task: (returnval){ [ 1309.747887] env[62525]: value = "task-1780802" [ 1309.747887] env[62525]: _type = "Task" [ 1309.747887] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.761157] env[62525]: DEBUG oslo_vmware.api [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.862702] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780799, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.930640] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780794, 'name': ReconfigVM_Task, 'duration_secs': 0.808223} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.943097] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 7f8392fa-1c11-4180-bda9-057b5cfa058c/7f8392fa-1c11-4180-bda9-057b5cfa058c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1309.944611] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8139154-550e-4d5b-96ea-9c14bd8c51a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.949469] env[62525]: DEBUG oslo_concurrency.lockutils [req-20cc15d8-65e5-4678-9e11-5986618fb310 req-cb61b180-400b-4e5e-b776-8b199f4fa883 service nova] Releasing lock "refresh_cache-f93669f2-c59d-4f3f-85a2-a60d714326ac" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.955906] env[62525]: DEBUG oslo_vmware.api [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1780795, 'name': PowerOnVM_Task, 'duration_secs': 0.683626} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.956991] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1309.957276] env[62525]: INFO nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Took 10.57 seconds to spawn the instance on the hypervisor. [ 1309.957488] env[62525]: DEBUG nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1309.959101] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1309.959101] env[62525]: value = "task-1780803" [ 1309.959101] env[62525]: _type = "Task" [ 1309.959101] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.961592] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5068ca9f-b012-45eb-82ea-79a4092f3207 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.983237] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780803, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.001961] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780801, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.044386] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bfaecb-1de3-4275-a5a8-b15bb52bd6df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.058023] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fd6072-e4c2-43c8-b455-caac4c8a3012 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.099788] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18045b9-fe54-4bab-a60c-a8dae6f8d92b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.117894] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079d5f9e-b1d4-44e8-9017-b731e1de79a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.135839] env[62525]: DEBUG nova.compute.provider_tree [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1310.262938] env[62525]: DEBUG oslo_vmware.api [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.363983] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780799, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.478173] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780803, 'name': Rename_Task, 'duration_secs': 0.307194} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.478173] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1310.478173] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26b73232-25a9-4598-8740-2d9bf33f4d7d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.492080] env[62525]: INFO nova.compute.manager [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Took 21.42 seconds to build instance. [ 1310.495391] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1310.495391] env[62525]: value = "task-1780804" [ 1310.495391] env[62525]: _type = "Task" [ 1310.495391] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.513759] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780801, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.804557} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.518045] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f93669f2-c59d-4f3f-85a2-a60d714326ac/f93669f2-c59d-4f3f-85a2-a60d714326ac.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1310.518431] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1310.518730] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780804, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.519607] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d713b16e-10ae-4b98-9c79-830a63317d1d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.531166] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1310.531166] env[62525]: value = "task-1780805" [ 1310.531166] env[62525]: _type = "Task" [ 1310.531166] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.545415] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780805, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.664277] env[62525]: ERROR nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [req-8bdbf89f-8a2d-412e-afdd-b1cde7692d5e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8bdbf89f-8a2d-412e-afdd-b1cde7692d5e"}]} [ 1310.699689] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1310.737366] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1310.737366] env[62525]: DEBUG nova.compute.provider_tree [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1310.757093] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: 1347c382-ea9f-478d-83fd-3edc1b98450e {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1310.768692] env[62525]: DEBUG oslo_vmware.api [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Task: {'id': task-1780802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.749239} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.771200] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1310.771534] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1310.772136] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1310.772356] env[62525]: INFO nova.compute.manager [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Took 2.01 seconds to destroy the instance on the hypervisor. [ 1310.772694] env[62525]: DEBUG oslo.service.loopingcall [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1310.772924] env[62525]: DEBUG nova.compute.manager [-] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1310.773050] env[62525]: DEBUG nova.network.neutron [-] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1310.791265] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1310.871024] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780799, 'name': CreateVM_Task, 'duration_secs': 1.402023} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.871024] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1310.871024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.871024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.871024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1310.871024] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03cea449-a8e2-45cf-82c7-213fe341afd6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.878187] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1310.878187] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5253b6d6-70bc-5035-c8ad-5b2a9ad1aa9e" [ 1310.878187] env[62525]: _type = "Task" [ 1310.878187] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.894916] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5253b6d6-70bc-5035-c8ad-5b2a9ad1aa9e, 'name': SearchDatastore_Task, 'duration_secs': 0.012109} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.895298] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.895479] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1310.895712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.895848] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.896066] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1310.896319] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eaeafb81-5963-4821-b045-dd6e96aa7578 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.906748] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1310.906945] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1310.907747] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91c47e67-c56b-4e2c-b2fc-77260fa3b471 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.916494] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1310.916494] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52592cba-aa0a-65a0-2468-dfefb811e9b2" [ 1310.916494] env[62525]: _type = "Task" [ 1310.916494] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.926419] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1310.926779] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1310.938765] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52592cba-aa0a-65a0-2468-dfefb811e9b2, 'name': SearchDatastore_Task, 'duration_secs': 0.011564} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.939839] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0520bf32-967e-4eeb-b786-5cee8500e84d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.950055] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1310.950055] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e7cd9d-a630-c9d6-07e9-3412137ff064" [ 1310.950055] env[62525]: _type = "Task" [ 1310.950055] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.968567] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e7cd9d-a630-c9d6-07e9-3412137ff064, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.996678] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b81034c-4361-4a66-af49-d2f7f7ef9b5a tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.938s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.018221] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780804, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.047188] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089389} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.047458] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1311.048267] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03070669-cb99-4328-a70d-f0db1b7e8308 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.076028] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] f93669f2-c59d-4f3f-85a2-a60d714326ac/f93669f2-c59d-4f3f-85a2-a60d714326ac.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1311.079496] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecb15c6a-3027-41d5-8a47-0f62de57ea99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.104175] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1311.104175] env[62525]: value = "task-1780806" [ 1311.104175] env[62525]: _type = "Task" [ 1311.104175] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.113311] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780806, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.349566] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76977b7d-d796-4ad2-8006-24f2720a933d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.357807] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698942d7-b268-4e1f-80fd-8c33143e02f9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.395021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5268ed6a-094b-402e-a274-499dfec051a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.404208] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea4c802-8864-4c18-b8e6-12f961e59e1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.420845] env[62525]: DEBUG nova.compute.provider_tree [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.461254] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e7cd9d-a630-c9d6-07e9-3412137ff064, 'name': SearchDatastore_Task, 'duration_secs': 0.032215} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.461320] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.461570] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 30fbab3d-8141-4d7e-987f-e4f4fc4a1808/30fbab3d-8141-4d7e-987f-e4f4fc4a1808.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1311.461860] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45bd11e9-b155-4b3e-a3c3-92c5e165e69f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.473144] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1311.473144] env[62525]: value = "task-1780808" [ 1311.473144] env[62525]: _type = "Task" [ 1311.473144] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.485731] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.500478] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1311.518348] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780804, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.617061] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780806, 'name': ReconfigVM_Task, 'duration_secs': 0.347808} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.617061] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Reconfigured VM instance instance-0000000e to attach disk [datastore1] f93669f2-c59d-4f3f-85a2-a60d714326ac/f93669f2-c59d-4f3f-85a2-a60d714326ac.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1311.617888] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4fca7b2-fd7e-4a12-9e0d-599805b36701 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.628890] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1311.628890] env[62525]: value = "task-1780809" [ 1311.628890] env[62525]: _type = "Task" [ 1311.628890] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.640619] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780809, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.925207] env[62525]: DEBUG nova.scheduler.client.report [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1311.984933] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780808, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.021007] env[62525]: DEBUG oslo_vmware.api [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780804, 'name': PowerOnVM_Task, 'duration_secs': 1.0338} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.024195] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.024195] env[62525]: INFO nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Took 7.55 seconds to spawn the instance on the hypervisor. [ 1312.024195] env[62525]: DEBUG nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1312.024195] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42549dc-b1b9-4f50-89d2-0446e8aa2572 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.034738] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.144681] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780809, 'name': Rename_Task, 'duration_secs': 0.505386} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.145088] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1312.145238] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5981d17d-ada6-4ffb-9bdd-a9c276df6dac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.154217] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1312.154217] env[62525]: value = "task-1780810" [ 1312.154217] env[62525]: _type = "Task" [ 1312.154217] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.164627] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780810, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.434428] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.325s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.434951] env[62525]: DEBUG nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1312.438535] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.630s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.440426] env[62525]: INFO nova.compute.claims [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1312.443969] env[62525]: DEBUG nova.network.neutron [-] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.488036] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533909} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.492693] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 30fbab3d-8141-4d7e-987f-e4f4fc4a1808/30fbab3d-8141-4d7e-987f-e4f4fc4a1808.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1312.492693] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1312.492693] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-331d41b7-4388-4ee0-a10b-a9284876487c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.498302] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1312.498302] env[62525]: value = "task-1780811" [ 1312.498302] env[62525]: _type = "Task" [ 1312.498302] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.508881] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780811, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.548500] env[62525]: INFO nova.compute.manager [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Took 21.45 seconds to build instance. [ 1312.668558] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780810, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.946910] env[62525]: DEBUG nova.compute.utils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1312.953934] env[62525]: INFO nova.compute.manager [-] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Took 2.18 seconds to deallocate network for instance. [ 1312.953934] env[62525]: DEBUG nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Not allocating networking since 'none' was specified. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1313.014551] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "e34ebddc-2192-4975-81d7-0f5c200f114e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.014657] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.023496] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076987} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.023811] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1313.024541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d9e581-2c16-48dd-9eb1-e3422a9e4a62 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.053115] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 30fbab3d-8141-4d7e-987f-e4f4fc4a1808/30fbab3d-8141-4d7e-987f-e4f4fc4a1808.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1313.054831] env[62525]: DEBUG oslo_concurrency.lockutils [None req-85436550-f349-4f9e-92f7-793eb8d42433 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "7f8392fa-1c11-4180-bda9-057b5cfa058c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.969s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.055097] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d76e51ee-b283-40d2-b667-61585cc902c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.082303] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 
tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1313.082303] env[62525]: value = "task-1780812" [ 1313.082303] env[62525]: _type = "Task" [ 1313.082303] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.091539] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780812, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.172623] env[62525]: DEBUG oslo_vmware.api [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1780810, 'name': PowerOnVM_Task, 'duration_secs': 0.817342} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.173048] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1313.173262] env[62525]: INFO nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Took 11.22 seconds to spawn the instance on the hypervisor. [ 1313.173446] env[62525]: DEBUG nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1313.175363] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab3d813-0344-41df-9a52-0122d72f50e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.269544] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.269656] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1313.269835] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.401142] env[62525]: DEBUG nova.compute.manager [req-08f2148c-370f-4054-9673-dc86d0bf61dc req-31214716-7e2e-4030-8e86-63f26b20b41b service nova] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Received event network-vif-deleted-40988ca1-f187-490a-9770-d08a56e6b866 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1313.459678] env[62525]: DEBUG nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1313.467426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.575552] env[62525]: DEBUG nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1313.596989] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780812, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.701652] env[62525]: INFO nova.compute.manager [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Took 22.84 seconds to build instance. 
[ 1313.711603] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.711603] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.773812] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.921461] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dd5871-79af-4786-a9be-65379381d376 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.932600] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0bdef0-18e2-4e17-9862-1237b67d7ebb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.981044] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b7b90d-8f2b-4c2b-ac0e-64c660f985fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.991396] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b918e7-b083-4d99-9d14-034ba6537b8e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.010259] env[62525]: DEBUG nova.compute.provider_tree [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1314.096490] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780812, 'name': ReconfigVM_Task, 'duration_secs': 0.868253} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.096871] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.097090] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 30fbab3d-8141-4d7e-987f-e4f4fc4a1808/30fbab3d-8141-4d7e-987f-e4f4fc4a1808.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1314.099158] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f2b6bcd-37ef-48a6-8b3c-c13b0f74456f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.105580] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1314.105580] env[62525]: value = "task-1780813" [ 1314.105580] env[62525]: _type = "Task" [ 1314.105580] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.115208] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780813, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.203775] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9e82e7c-458c-4547-8970-cb07f174719f tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.357s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.482354] env[62525]: DEBUG nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1314.508812] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1314.509351] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1314.509543] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1314.509732] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1314.509884] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1314.510030] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1314.510249] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1314.510446] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1314.510644] env[62525]: DEBUG nova.virt.hardware [None 
req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1314.511141] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1314.511141] env[62525]: DEBUG nova.virt.hardware [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1314.512994] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c64867-ff8e-4bf8-b406-5cf7ed75ff25 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.517165] env[62525]: DEBUG nova.scheduler.client.report [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1314.528530] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbab1d1-837f-4b9c-8dfe-111ea384479c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.552504] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1314.558903] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Creating folder: Project (f643d81487864504885efd7ae3dec105). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.559574] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8a3a250-5703-4cbb-95a3-525906dc240e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.572091] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Created folder: Project (f643d81487864504885efd7ae3dec105) in parent group-v369553. 
[ 1314.572302] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Creating folder: Instances. Parent ref: group-v369600. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.572865] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8311367-bdf4-4667-9a0e-9247b586e1b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.584323] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Created folder: Instances in parent group-v369600. [ 1314.584803] env[62525]: DEBUG oslo.service.loopingcall [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1314.586363] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1314.586363] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e8653d0-5dbd-47c9-bb87-871e2d0fab42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.615176] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1314.615176] env[62525]: value = "task-1780816" [ 1314.615176] env[62525]: _type = "Task" [ 1314.615176] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.625123] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780813, 'name': Rename_Task, 'duration_secs': 0.165404} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.625123] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.625123] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea94202a-69b3-46eb-ade9-a848534c9ad9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.627594] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780816, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.629339] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1314.629339] env[62525]: value = "task-1780817" [ 1314.629339] env[62525]: _type = "Task" [ 1314.629339] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.639765] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780817, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.707299] env[62525]: DEBUG nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1315.024945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1315.024945] env[62525]: DEBUG nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1315.027141] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.842s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.028700] env[62525]: INFO nova.compute.claims [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1315.130970] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780816, 'name': CreateVM_Task, 'duration_secs': 0.37444} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.134461] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1315.135181] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.135257] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.135864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1315.136733] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c886a34-b8b3-4e9b-8ce4-2a5af6b0b6c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.145091] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780817, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.145091] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1315.145091] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5263b678-ad5d-8e22-c971-93d93f26cf50" [ 1315.145091] env[62525]: _type = "Task" [ 1315.145091] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.155740] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5263b678-ad5d-8e22-c971-93d93f26cf50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.244722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.299927] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "9dfb7d7f-6656-46fd-969e-c692db1ce507" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.303590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "9dfb7d7f-6656-46fd-969e-c692db1ce507" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.303590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "cfae9bf8-012a-4286-b978-bba8a913bba2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.303590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.535136] env[62525]: DEBUG nova.compute.utils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1315.542225] env[62525]: DEBUG nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1315.542492] env[62525]: DEBUG nova.network.neutron [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1315.647020] env[62525]: DEBUG oslo_vmware.api [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780817, 'name': PowerOnVM_Task, 'duration_secs': 0.637616} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.647020] env[62525]: DEBUG nova.policy [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6e5adfb79a4911a67c14a7f7b41a17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6486285375a44318c14aee23e914dcf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1315.649456] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1315.649948] env[62525]: INFO nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Took 6.50 seconds to spawn the instance on the hypervisor. [ 1315.650266] env[62525]: DEBUG nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1315.651455] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637a9e9c-7f86-4c79-8b53-27bef4bd2770 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.674225] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5263b678-ad5d-8e22-c971-93d93f26cf50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.040986] env[62525]: DEBUG nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1316.180045] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5263b678-ad5d-8e22-c971-93d93f26cf50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.184793] env[62525]: INFO nova.compute.manager [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Took 22.14 seconds to build instance. [ 1316.556297] env[62525]: DEBUG nova.network.neutron [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Successfully created port: cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1316.641487] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "e3f3fc2c-0060-4521-8aa3-da37209aee81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.641741] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.648191] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9d28cc-984a-4781-bcca-c7e195f71ff1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.663995] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9ce2d6-f8ca-4ba0-bfa9-5dca6e7f5a62 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.672268] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5263b678-ad5d-8e22-c971-93d93f26cf50, 'name': SearchDatastore_Task, 'duration_secs': 1.214219} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.672268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.672268] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1316.672597] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.672968] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.673638] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1316.673638] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22872482-6b54-479b-9fc4-5c1e504da0a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.707457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-20885365-c9a1-4e01-92e5-15afc95ebf62 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.665s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.709935] env[62525]: DEBUG nova.compute.manager [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Received event network-changed-682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1316.710091] env[62525]: DEBUG nova.compute.manager [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Refreshing instance network info cache due to event network-changed-682b8bd4-d21c-41b2-a9ed-2eae30b329e0. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1316.710384] env[62525]: DEBUG oslo_concurrency.lockutils [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] Acquiring lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.710534] env[62525]: DEBUG oslo_concurrency.lockutils [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] Acquired lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.710695] env[62525]: DEBUG nova.network.neutron [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Refreshing network info cache for port 682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1316.718335] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186e2482-61bb-41c0-8052-b91b997cbe27 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.740588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Acquiring lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.740588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.743541] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1316.743541] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1316.743955] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47a15e4-54aa-49f7-b16f-03b13c68cb60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.750072] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71a19e1b-81b9-42fd-a1d1-3583669e5464 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.783277] env[62525]: DEBUG nova.compute.provider_tree [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.788773] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1316.788773] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52afa3e0-0c24-c7e6-62a9-ee3da75c2507" [ 1316.788773] env[62525]: _type = "Task" [ 1316.788773] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.814784] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52afa3e0-0c24-c7e6-62a9-ee3da75c2507, 'name': SearchDatastore_Task, 'duration_secs': 0.017413} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.819027] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35c36965-027c-4d30-8744-897cb4c4057c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.828466] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1316.828466] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52bcfe2c-da19-f9fc-ddba-cb9338a513a1" [ 1316.828466] env[62525]: _type = "Task" [ 1316.828466] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.841294] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52bcfe2c-da19-f9fc-ddba-cb9338a513a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.056128] env[62525]: DEBUG nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1317.100533] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1317.100533] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1317.100533] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1317.100794] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1317.100794] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1317.100869] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1317.101023] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1317.103097] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1317.103302] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1317.103831] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1317.104458] env[62525]: DEBUG nova.virt.hardware [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1317.108491] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4fb788-7948-44a5-a201-9fd1b8e0cdbe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.123303] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf225208-0a8e-4da8-99e7-39c6337785bf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.229610] env[62525]: DEBUG nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1317.291823] env[62525]: DEBUG nova.scheduler.client.report [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1317.345985] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52bcfe2c-da19-f9fc-ddba-cb9338a513a1, 'name': SearchDatastore_Task, 'duration_secs': 0.024054} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.345985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.345985] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1317.345985] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2d2d970-c689-41ab-8bee-b56878b6983f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.358136] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1317.358136] env[62525]: value = "task-1780818" [ 1317.358136] env[62525]: _type = "Task" [ 1317.358136] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.377883] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780818, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.413028] env[62525]: DEBUG nova.compute.manager [None req-bec581c0-d249-4d45-9b6d-a9349cac7fb0 tempest-ServerDiagnosticsV248Test-885257089 tempest-ServerDiagnosticsV248Test-885257089-project-admin] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1317.413525] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6e31f4-39cf-42d5-8ba1-93e2fc5bc1a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.425924] env[62525]: INFO nova.compute.manager [None req-bec581c0-d249-4d45-9b6d-a9349cac7fb0 tempest-ServerDiagnosticsV248Test-885257089 tempest-ServerDiagnosticsV248Test-885257089-project-admin] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Retrieving diagnostics [ 1317.425924] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c8ce11-56f2-42e8-ac6a-317de1b88c80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.775023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.802635] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.775s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.803440] env[62525]: DEBUG nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1317.807130] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.692s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.810205] env[62525]: INFO nova.compute.claims [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1317.841322] env[62525]: DEBUG nova.network.neutron [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Updated VIF entry in instance network info cache for port 682b8bd4-d21c-41b2-a9ed-2eae30b329e0. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1317.841998] env[62525]: DEBUG nova.network.neutron [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Updating instance_info_cache with network_info: [{"id": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "address": "fa:16:3e:d1:d7:37", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap682b8bd4-d2", "ovs_interfaceid": "682b8bd4-d21c-41b2-a9ed-2eae30b329e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.874041] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780818, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.316536] env[62525]: DEBUG nova.compute.utils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1318.323928] env[62525]: DEBUG nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1318.323928] env[62525]: DEBUG nova.network.neutron [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1318.346419] env[62525]: DEBUG oslo_concurrency.lockutils [req-3014dd99-f45d-40b1-90f8-32b9f8bf9d1d req-4568c2d0-1afa-4b2f-8b0f-7aa6da25c726 service nova] Releasing lock "refresh_cache-aa639aa3-d21c-4923-bc39-56e648c566fb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.369429] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780818, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73799} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.369690] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1318.369895] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1318.370170] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d45f9f9b-8fea-426b-bc16-9b07bd047af4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.381208] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1318.381208] env[62525]: value = "task-1780819" [ 1318.381208] env[62525]: _type = "Task" [ 1318.381208] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.389654] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780819, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.398794] env[62525]: DEBUG nova.policy [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '263033744f954d3c91e656d552889679', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc29658da6d8472b8dce5699cef37ed0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1318.607945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.608340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.831830] env[62525]: DEBUG nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1318.900643] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780819, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075037} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.906430] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1318.906532] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcfd2ca-c3bc-4743-be13-1e6fb9953e7d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.929399] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1318.935316] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0c5fcbd-1f53-4d85-8e9f-bf5c858f3129 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.961733] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1318.961733] env[62525]: value = "task-1780820" [ 1318.961733] env[62525]: _type = "Task" [ 1318.961733] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.973800] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780820, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.350349] env[62525]: DEBUG nova.network.neutron [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Successfully created port: 3030b0dc-e404-441e-ab9d-2a05ca9d68e2 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1319.477302] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.534917] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d02f94-cdf3-4f16-823e-7b3da515f554 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.545016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb415fb-a7e4-42a6-8921-ef21726dba88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.549146] env[62525]: DEBUG nova.compute.manager [req-8c9ba199-799d-4582-af3b-b0e7a8d9ad30 req-b56a4144-c4b7-4089-a4cd-39053caad0cd service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Received event network-vif-plugged-cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1319.549365] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c9ba199-799d-4582-af3b-b0e7a8d9ad30 req-b56a4144-c4b7-4089-a4cd-39053caad0cd service nova] Acquiring lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.549569] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c9ba199-799d-4582-af3b-b0e7a8d9ad30 req-b56a4144-c4b7-4089-a4cd-39053caad0cd service nova] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.549730] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c9ba199-799d-4582-af3b-b0e7a8d9ad30 req-b56a4144-c4b7-4089-a4cd-39053caad0cd service nova] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.549892] env[62525]: DEBUG nova.compute.manager [req-8c9ba199-799d-4582-af3b-b0e7a8d9ad30 req-b56a4144-c4b7-4089-a4cd-39053caad0cd service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] No waiting events found dispatching network-vif-plugged-cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1319.550059] env[62525]: WARNING nova.compute.manager [req-8c9ba199-799d-4582-af3b-b0e7a8d9ad30 req-b56a4144-c4b7-4089-a4cd-39053caad0cd service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Received unexpected event network-vif-plugged-cf41a826-2546-4877-b604-5fd32f6cc102 for instance with vm_state building and task_state spawning. 
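The entries just above trace Nova's external-event path: Neutron reports network-vif-plugged for port cf41a826, the per-instance "<uuid>-events" lock is taken while the event table is checked, and because nothing has registered a waiter for that event yet the manager logs "No waiting events found dispatching ..." followed by the "Received unexpected event" warning. The following is a minimal pop-or-warn sketch of that pattern; the class and method names are illustrative stand-ins, not Nova's actual implementation.

import threading
from collections import defaultdict

class InstanceEventTable:
    # Illustrative sketch (not Nova's code): waiters register an event name and
    # block on a threading.Event; an incoming external event either wakes the
    # matching waiter or is reported as unexpected, mirroring the log lines above.

    def __init__(self):
        self._lock = threading.Lock()          # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)      # instance_uuid -> {event_name: threading.Event}

    def prepare_for_event(self, instance_uuid, event_name):
        # Called by the code path that expects the event (e.g. before plugging VIFs).
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        # Called when an external event arrives; returns the waiter or None.
        with self._lock:                        # "Acquiring lock ... by ... _pop_event"
            return self._waiters[instance_uuid].pop(event_name, None)

    def dispatch(self, instance_uuid, event_name):
        waiter = self.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to "No waiting events found dispatching ..." plus the WARNING.
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
        else:
            waiter.set()                        # wake the thread waiting for the VIF plug

if __name__ == "__main__":
    table = InstanceEventTable()
    # No waiter registered yet, so this takes the warning path seen in the log.
    table.dispatch("b6bdc187-a266-4f7d-a9e4-85cb100cf4bf",
                   "network-vif-plugged-cf41a826-2546-4877-b604-5fd32f6cc102")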
[ 1319.589224] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0c3fba-3243-4a61-9577-61893f4e7cc5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.592409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.592641] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.600228] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263a909e-48f4-42f8-81b6-f849054cb29c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.618342] env[62525]: DEBUG nova.compute.provider_tree [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.640066] env[62525]: DEBUG nova.network.neutron [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Successfully updated port: cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1319.841232] env[62525]: DEBUG nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1319.883262] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1319.883902] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1319.884167] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1319.884476] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1319.884852] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1319.885060] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1319.885435] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1319.885646] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1319.885851] env[62525]: DEBUG 
nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1319.886081] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1319.886309] env[62525]: DEBUG nova.virt.hardware [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1319.887264] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbcb952-7c28-4b43-8048-731e79047963 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.900455] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69db5ae-2499-49d0-b9d9-1c33b8967ce7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.973971] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780820, 'name': ReconfigVM_Task, 'duration_secs': 0.864054} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.974336] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Reconfigured VM instance instance-00000011 to attach disk [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1319.974957] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df4a529c-82ac-4dbf-8ed8-f4f092bc1d46 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.995289] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1319.995289] env[62525]: value = "task-1780821" [ 1319.995289] env[62525]: _type = "Task" [ 1319.995289] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.007075] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780821, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.127899] env[62525]: DEBUG nova.scheduler.client.report [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1320.144722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.144722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.144722] env[62525]: DEBUG nova.network.neutron [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1320.511737] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780821, 'name': Rename_Task, 'duration_secs': 0.371421} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.512980] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1320.512980] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b94ab7e-22e9-4d90-934c-cbe9678d132f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.521359] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1320.521359] env[62525]: value = "task-1780822" [ 1320.521359] env[62525]: _type = "Task" [ 1320.521359] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.530699] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780822, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.638259] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.831s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.638872] env[62525]: DEBUG nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1320.644413] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.989s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.647052] env[62525]: INFO nova.compute.claims [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1320.734984] env[62525]: DEBUG nova.network.neutron [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1320.887503] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.887742] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.888068] env[62525]: DEBUG nova.compute.manager [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1320.888875] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48961cd-c4fb-450e-b013-cd5fcf07e2ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.896720] env[62525]: DEBUG nova.compute.manager [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1320.897367] env[62525]: DEBUG nova.objects.instance [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lazy-loading 'flavor' on Instance uuid 98334a1b-1a73-408f-93a4-6dc72764ebfc {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.034673] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780822, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.035058] env[62525]: DEBUG nova.network.neutron [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updating instance_info_cache with network_info: [{"id": "cf41a826-2546-4877-b604-5fd32f6cc102", "address": "fa:16:3e:0b:1f:7e", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf41a826-25", "ovs_interfaceid": "cf41a826-2546-4877-b604-5fd32f6cc102", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.152483] env[62525]: DEBUG nova.compute.utils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1321.160551] env[62525]: DEBUG nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Not allocating networking since 'none' was specified. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1321.406889] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1321.407239] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b81258c9-967e-43a3-a5bf-61ea2b1f91df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.417551] env[62525]: DEBUG oslo_vmware.api [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1321.417551] env[62525]: value = "task-1780823" [ 1321.417551] env[62525]: _type = "Task" [ 1321.417551] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.435401] env[62525]: DEBUG oslo_vmware.api [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.540246] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Releasing lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.540572] env[62525]: DEBUG nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance network_info: |[{"id": "cf41a826-2546-4877-b604-5fd32f6cc102", "address": "fa:16:3e:0b:1f:7e", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf41a826-25", "ovs_interfaceid": "cf41a826-2546-4877-b604-5fd32f6cc102", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1321.540906] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:1f:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf41a826-2546-4877-b604-5fd32f6cc102', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1321.553056] env[62525]: DEBUG oslo.service.loopingcall [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1321.553056] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1321.553232] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f72e257-d25b-401f-bacd-d2b1a2b27818 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.581257] env[62525]: DEBUG oslo_vmware.api [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780822, 'name': PowerOnVM_Task, 'duration_secs': 0.743098} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.582065] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1321.582298] env[62525]: INFO nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Took 7.10 seconds to spawn the instance on the hypervisor. [ 1321.582528] env[62525]: DEBUG nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1321.584144] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcb5210-3545-48d1-b433-944cb796e38d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.588643] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1321.588643] env[62525]: value = "task-1780824" [ 1321.588643] env[62525]: _type = "Task" [ 1321.588643] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.604913] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780824, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.665502] env[62525]: DEBUG nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1321.931216] env[62525]: DEBUG oslo_vmware.api [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780823, 'name': PowerOffVM_Task, 'duration_secs': 0.394618} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.931513] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.931680] env[62525]: DEBUG nova.compute.manager [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1321.932619] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9922503c-58cf-4966-aa27-04cebedeec7a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.037108] env[62525]: DEBUG nova.network.neutron [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Successfully updated port: 3030b0dc-e404-441e-ab9d-2a05ca9d68e2 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1322.089900] env[62525]: DEBUG nova.compute.manager [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Received event network-changed-cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1322.090105] env[62525]: DEBUG nova.compute.manager [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Refreshing instance network info cache due to event network-changed-cf41a826-2546-4877-b604-5fd32f6cc102. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1322.090352] env[62525]: DEBUG oslo_concurrency.lockutils [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] Acquiring lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.090498] env[62525]: DEBUG oslo_concurrency.lockutils [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] Acquired lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.090880] env[62525]: DEBUG nova.network.neutron [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Refreshing network info cache for port cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1322.112785] env[62525]: INFO nova.compute.manager [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Took 25.15 seconds to build instance. 
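Throughout this stretch the driver kicks off long-running vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task) and then polls each one until it reports success, logging the progress percentage on every pass and the duration_secs once it completes. The loop below is a generic poll-until-done sketch of that behaviour under assumed helper names and result layout; it is not the oslo.vmware implementation.

import time

class TaskStillRunning(Exception):
    # Placeholder for a "not finished yet" condition in this sketch.
    pass

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    # Poll a long-running hypervisor task until it finishes.
    # get_task_info: assumed callable returning a dict such as
    #   {"state": "running"|"success"|"error", "progress": int, "error": str}
    # The callable name and dict layout are assumptions for this sketch.
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info                              # caller reads e.g. duration, result value
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Mirrors log lines such as "Task: {'name': CopyVirtualDisk_Task} progress is 25%."
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
        time.sleep(interval)

if __name__ == "__main__":
    # Fake task that reports 0%, then 66%, then success, like task-1780822 above.
    states = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 66},
                   {"state": "success", "progress": 100}])
    print(wait_for_task(lambda _tid: next(states), "task-1780822", interval=0.01))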
[ 1322.123976] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780824, 'name': CreateVM_Task, 'duration_secs': 0.464092} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.126309] env[62525]: DEBUG nova.compute.manager [req-d6cbb083-e8fa-4a9f-90e9-8a4ba8b0cfe1 req-75a3413f-e629-41e1-8abf-de15c6bb1dd0 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Received event network-vif-plugged-3030b0dc-e404-441e-ab9d-2a05ca9d68e2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1322.126537] env[62525]: DEBUG oslo_concurrency.lockutils [req-d6cbb083-e8fa-4a9f-90e9-8a4ba8b0cfe1 req-75a3413f-e629-41e1-8abf-de15c6bb1dd0 service nova] Acquiring lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.126838] env[62525]: DEBUG oslo_concurrency.lockutils [req-d6cbb083-e8fa-4a9f-90e9-8a4ba8b0cfe1 req-75a3413f-e629-41e1-8abf-de15c6bb1dd0 service nova] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.126922] env[62525]: DEBUG oslo_concurrency.lockutils [req-d6cbb083-e8fa-4a9f-90e9-8a4ba8b0cfe1 req-75a3413f-e629-41e1-8abf-de15c6bb1dd0 service nova] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.127086] env[62525]: DEBUG nova.compute.manager [req-d6cbb083-e8fa-4a9f-90e9-8a4ba8b0cfe1 req-75a3413f-e629-41e1-8abf-de15c6bb1dd0 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] No waiting events found dispatching network-vif-plugged-3030b0dc-e404-441e-ab9d-2a05ca9d68e2 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1322.128090] env[62525]: WARNING nova.compute.manager [req-d6cbb083-e8fa-4a9f-90e9-8a4ba8b0cfe1 req-75a3413f-e629-41e1-8abf-de15c6bb1dd0 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Received unexpected event network-vif-plugged-3030b0dc-e404-441e-ab9d-2a05ca9d68e2 for instance with vm_state building and task_state spawning. 
[ 1322.128090] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1322.129256] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.129416] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.130096] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1322.130183] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f38b4ed4-2f94-4511-bdb0-09816f6ee0c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.139989] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1322.139989] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5293913e-b8f6-58a2-86fc-1555a54445b6" [ 1322.139989] env[62525]: _type = "Task" [ 1322.139989] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.155568] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5293913e-b8f6-58a2-86fc-1555a54445b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.305793] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f20b99a-920e-41e7-8f9b-6bf887547788 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.316735] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850c589f-1c52-43b7-8a3c-9af46bd35bed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.360015] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e94799-b3e9-42b0-bfae-425680ae2dbe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.369656] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd0a471-aada-4748-b653-38df7d45c897 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.388047] env[62525]: DEBUG nova.compute.provider_tree [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1322.461208] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9307ca71-2a16-4466-bc1c-2dd6491ae4e7 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.573s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.540738] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "refresh_cache-35a2e221-e1c5-49d9-af93-5e5f28c62b8f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.540738] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquired lock "refresh_cache-35a2e221-e1c5-49d9-af93-5e5f28c62b8f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.540738] env[62525]: DEBUG nova.network.neutron [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.626038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342b4bbb-621a-4b0f-8ab5-600835d58f9d tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.312s 
{{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.653925] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5293913e-b8f6-58a2-86fc-1555a54445b6, 'name': SearchDatastore_Task, 'duration_secs': 0.019945} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.655656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.655656] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1322.655656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.655656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.656117] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1322.656117] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2bffbce-115c-4363-b6b6-79dfcae66717 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.670840] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1322.671043] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 
tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1322.683175] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd9273c3-4ed2-4cb6-a829-a789fbece02f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.683329] env[62525]: DEBUG nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1322.692044] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1322.692044] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526e1884-f702-ffd5-2edb-9ad5f399a88c" [ 1322.692044] env[62525]: _type = "Task" [ 1322.692044] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.716525] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526e1884-f702-ffd5-2edb-9ad5f399a88c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.759804] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1322.760242] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1322.760515] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1322.763439] 
env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1322.763439] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1322.763439] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1322.763439] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1322.763439] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1322.763797] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1322.763797] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1322.763797] env[62525]: DEBUG nova.virt.hardware [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1322.763797] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40af978-ed5b-4d23-8f63-f4466c891572 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.775185] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6f66f0-5230-4d87-a495-20c843ecbb2a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.790233] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} 
[ 1322.794865] env[62525]: DEBUG oslo.service.loopingcall [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1322.795019] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1322.795236] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f4e279e-27eb-4ea0-9b2a-47ee2a33fb90 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.816222] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1322.816222] env[62525]: value = "task-1780825" [ 1322.816222] env[62525]: _type = "Task" [ 1322.816222] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.834833] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780825, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.895575] env[62525]: DEBUG nova.scheduler.client.report [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1322.899163] env[62525]: INFO nova.compute.manager [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Rebuilding instance [ 1323.031830] env[62525]: DEBUG nova.compute.manager [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1323.036240] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19e1021-888a-4647-a9de-0ab22e3ec2e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.130788] env[62525]: DEBUG nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1323.174472] env[62525]: DEBUG nova.network.neutron [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1323.211746] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526e1884-f702-ffd5-2edb-9ad5f399a88c, 'name': SearchDatastore_Task, 'duration_secs': 0.018621} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.212589] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb6b7ad3-d2ee-4b17-a243-10111ba6b844 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.219285] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1323.219285] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521ec794-4e45-8d68-e8ff-61927f4ce31c" [ 1323.219285] env[62525]: _type = "Task" [ 1323.219285] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.239721] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521ec794-4e45-8d68-e8ff-61927f4ce31c, 'name': SearchDatastore_Task, 'duration_secs': 0.013696} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.240016] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.240336] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] b6bdc187-a266-4f7d-a9e4-85cb100cf4bf/b6bdc187-a266-4f7d-a9e4-85cb100cf4bf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1323.240607] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63b6532e-0951-4dfc-82ce-55f6aecd6350 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.251501] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1323.251501] env[62525]: value = "task-1780826" [ 1323.251501] env[62525]: _type = "Task" [ 1323.251501] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.265769] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.310559] env[62525]: DEBUG nova.network.neutron [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updated VIF entry in instance network info cache for port cf41a826-2546-4877-b604-5fd32f6cc102. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1323.310857] env[62525]: DEBUG nova.network.neutron [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updating instance_info_cache with network_info: [{"id": "cf41a826-2546-4877-b604-5fd32f6cc102", "address": "fa:16:3e:0b:1f:7e", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf41a826-25", "ovs_interfaceid": "cf41a826-2546-4877-b604-5fd32f6cc102", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.330041] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780825, 'name': CreateVM_Task, 'duration_secs': 0.378357} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.330731] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1323.331636] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.331636] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.331636] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1323.332074] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-861bbcc2-36f5-4dc4-b7b7-95a16efa09ee {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.339160] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1323.339160] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521985b1-a74f-5e03-6ec6-bc1ebd0ad0ad" [ 1323.339160] env[62525]: _type = "Task" [ 1323.339160] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.352043] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521985b1-a74f-5e03-6ec6-bc1ebd0ad0ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.404651] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.760s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.404979] env[62525]: DEBUG nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1323.411031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.422s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.411031] env[62525]: INFO nova.compute.claims [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1323.561448] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.561448] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9a237cf-d8c8-4b90-9a87-b2059baae392 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.567789] env[62525]: DEBUG nova.network.neutron [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Updating instance_info_cache with network_info: [{"id": "3030b0dc-e404-441e-ab9d-2a05ca9d68e2", "address": "fa:16:3e:29:6e:74", "network": {"id": "54c72c88-a0f1-4cd0-868a-abf71987b54a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1044062588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc29658da6d8472b8dce5699cef37ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3030b0dc-e4", "ovs_interfaceid": "3030b0dc-e404-441e-ab9d-2a05ca9d68e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.573321] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1323.573321] env[62525]: value = "task-1780827" [ 1323.573321] env[62525]: _type = "Task" [ 1323.573321] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.583686] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780827, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.662214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.769735] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.815654] env[62525]: DEBUG oslo_concurrency.lockutils [req-7b2db559-71f8-42fe-b4e7-b863a5276f65 req-bbd2389b-0e8d-4d56-a81f-329e7a53c284 service nova] Releasing lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.857065] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521985b1-a74f-5e03-6ec6-bc1ebd0ad0ad, 'name': SearchDatastore_Task, 'duration_secs': 0.025267} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.857516] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.857746] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1323.858128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.858278] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.858441] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1323.858737] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa37bf1c-6d64-48c4-a2c6-a9723683a11b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.872209] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1323.872209] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1323.872209] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d76628b3-e213-414f-a22b-49cb5295a231 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.887408] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1323.887408] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cab884-65c9-5c0b-7004-ad561ac39994" [ 1323.887408] env[62525]: _type = "Task" [ 1323.887408] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.900827] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cab884-65c9-5c0b-7004-ad561ac39994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.915332] env[62525]: DEBUG nova.compute.utils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1323.919516] env[62525]: DEBUG nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1323.920133] env[62525]: DEBUG nova.network.neutron [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1324.072447] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Releasing lock "refresh_cache-35a2e221-e1c5-49d9-af93-5e5f28c62b8f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.072885] env[62525]: DEBUG nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Instance network_info: |[{"id": "3030b0dc-e404-441e-ab9d-2a05ca9d68e2", "address": "fa:16:3e:29:6e:74", "network": {"id": "54c72c88-a0f1-4cd0-868a-abf71987b54a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1044062588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc29658da6d8472b8dce5699cef37ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3030b0dc-e4", "ovs_interfaceid": "3030b0dc-e404-441e-ab9d-2a05ca9d68e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1324.073438] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:6e:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ed91b7b-b4ec-486d-ab34-af0afb7ec691', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3030b0dc-e404-441e-ab9d-2a05ca9d68e2', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1324.082092] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Creating folder: Project (bc29658da6d8472b8dce5699cef37ed0). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.086501] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11a92f29-a614-45d1-bf37-015d0a785431 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.095932] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780827, 'name': PowerOffVM_Task, 'duration_secs': 0.133334} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.095932] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1324.095932] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1324.097262] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8452d6f-2dd7-4aa6-8aab-b6dce32173d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.101521] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Created folder: Project (bc29658da6d8472b8dce5699cef37ed0) in parent group-v369553. [ 1324.101682] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Creating folder: Instances. Parent ref: group-v369605. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.102478] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfecbc46-3979-43db-a489-50ca27df4190 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.107438] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1324.107906] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53323624-9c71-44f1-a941-cd7ddddbcc88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.119033] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Created folder: Instances in parent group-v369605. 
[ 1324.119033] env[62525]: DEBUG oslo.service.loopingcall [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.119033] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1324.119266] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-465a1fb8-77f4-472a-b479-c9fe9aad972a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.142200] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1324.142200] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1324.142335] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Deleting the datastore file [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1324.143851] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8f89378-8e19-40ba-b867-93314ae63101 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.146013] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1324.146013] env[62525]: value = "task-1780831" [ 1324.146013] env[62525]: _type = "Task" [ 1324.146013] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.154468] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1324.154468] env[62525]: value = "task-1780832" [ 1324.154468] env[62525]: _type = "Task" [ 1324.154468] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.163070] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780831, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.170998] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780832, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.269462] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.303403] env[62525]: DEBUG nova.compute.manager [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Received event network-changed-3030b0dc-e404-441e-ab9d-2a05ca9d68e2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1324.303403] env[62525]: DEBUG nova.compute.manager [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Refreshing instance network info cache due to event network-changed-3030b0dc-e404-441e-ab9d-2a05ca9d68e2. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1324.303403] env[62525]: DEBUG oslo_concurrency.lockutils [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] Acquiring lock "refresh_cache-35a2e221-e1c5-49d9-af93-5e5f28c62b8f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.303403] env[62525]: DEBUG oslo_concurrency.lockutils [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] Acquired lock "refresh_cache-35a2e221-e1c5-49d9-af93-5e5f28c62b8f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.303403] env[62525]: DEBUG nova.network.neutron [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Refreshing network info cache for port 3030b0dc-e404-441e-ab9d-2a05ca9d68e2 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1324.304354] env[62525]: DEBUG nova.policy [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ce008a841cf41389c74dce78bf3cd99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a380a12ff0444c989a3a42dbaf5d579e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1324.387475] env[62525]: DEBUG nova.objects.instance [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lazy-loading 'flavor' on Instance uuid 98334a1b-1a73-408f-93a4-6dc72764ebfc {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.403077] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: 
{'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cab884-65c9-5c0b-7004-ad561ac39994, 'name': SearchDatastore_Task, 'duration_secs': 0.019671} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.404103] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67ce035e-d6e6-4615-bf0d-fa7d8680089a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.412138] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1324.412138] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52321573-b585-2072-994f-57c181371d8f" [ 1324.412138] env[62525]: _type = "Task" [ 1324.412138] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.432489] env[62525]: DEBUG nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1324.443860] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52321573-b585-2072-994f-57c181371d8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.663031] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780831, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.669705] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162688} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.669848] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1324.669951] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1324.670125] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1324.776555] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780826, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.898945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.899157] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquired lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.900405] env[62525]: DEBUG nova.network.neutron [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1324.900405] env[62525]: DEBUG nova.objects.instance [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lazy-loading 'info_cache' on Instance uuid 98334a1b-1a73-408f-93a4-6dc72764ebfc {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.937987] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52321573-b585-2072-994f-57c181371d8f, 'name': SearchDatastore_Task, 'duration_secs': 0.121482} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.937987] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.937987] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1324.937987] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff50e7c0-4496-43e4-83fa-415076ae5be5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.965963] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1324.965963] env[62525]: value = "task-1780833" [ 1324.965963] env[62525]: _type = "Task" [ 1324.965963] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.982094] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780833, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.060531] env[62525]: DEBUG nova.network.neutron [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Successfully created port: 1a9d7127-13cf-4940-83b3-8cecf0ea3a03 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1325.155332] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31513d86-48ad-46ce-bcc0-98b5f6a768c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.161546] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780831, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.167328] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbec5bdf-8311-446a-ae27-66c0cb6007f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.208023] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980fda2f-902c-4f71-9eb2-56d592fd61ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.215409] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0266100-c2e7-4c79-994c-23da3a91d604 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.234397] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1325.267912] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780826, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.826934} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.270467] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] b6bdc187-a266-4f7d-a9e4-85cb100cf4bf/b6bdc187-a266-4f7d-a9e4-85cb100cf4bf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1325.270467] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1325.270467] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f64cffc-1e3a-453f-8bdb-bb8368f48c25 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.278931] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1325.278931] env[62525]: value = "task-1780834" [ 1325.278931] env[62525]: _type = "Task" [ 1325.278931] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.289464] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780834, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.403977] env[62525]: DEBUG nova.objects.base [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Object Instance<98334a1b-1a73-408f-93a4-6dc72764ebfc> lazy-loaded attributes: flavor,info_cache {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1325.455379] env[62525]: DEBUG nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1325.500796] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780833, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.500796] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1325.500796] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1325.502834] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.502834] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1325.502834] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.502834] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1325.502834] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1325.503137] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1325.503137] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1325.503137] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1325.503137] env[62525]: DEBUG nova.virt.hardware [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1325.503137] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2a3be5-8e4e-4533-8e39-f0d7f4ac7ffb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.515500] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cdeb03-a16d-4429-ad2e-7e075ca1fcf5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.660154] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780831, 'name': CreateVM_Task, 'duration_secs': 1.117734} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.660300] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1325.661080] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.661259] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.661572] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1325.661953] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca25761b-db90-4bec-9b45-91b325f918ad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.668545] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1325.668545] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5217f271-98d6-7b28-96fe-60388d53740d" [ 1325.668545] env[62525]: _type = "Task" [ 1325.668545] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.679954] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5217f271-98d6-7b28-96fe-60388d53740d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.745250] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1325.745501] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1325.745652] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.745825] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1325.745962] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.747973] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1325.748206] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1325.748460] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1325.748721] env[62525]: DEBUG nova.virt.hardware [None 
req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1325.748963] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1325.749230] env[62525]: DEBUG nova.virt.hardware [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1325.751408] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fece368-8c6d-4b47-b146-d006bafc641c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.762872] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c824834b-83c5-48bb-9738-13e7a49792dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.768928] env[62525]: ERROR nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [req-a3b7dbe7-5e27-4d90-89f7-3616863f5b39] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a3b7dbe7-5e27-4d90-89f7-3616863f5b39"}]} [ 1325.786800] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1325.794039] env[62525]: DEBUG oslo.service.loopingcall [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1325.797939] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1325.803150] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1325.803712] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8097c082-b027-4fc0-b51f-7a78f08f474f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.824536] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780834, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081285} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.826301] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1325.826626] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1325.826626] env[62525]: value = "task-1780835" [ 1325.826626] env[62525]: _type = "Task" [ 1325.826626] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.827367] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51c5027-d590-4daa-a405-d3c818ed035c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.832482] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1325.832482] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1325.844661] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780835, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.862407] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] b6bdc187-a266-4f7d-a9e4-85cb100cf4bf/b6bdc187-a266-4f7d-a9e4-85cb100cf4bf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1325.863619] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1325.865698] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5799b9ce-7971-4ad3-8975-0e58428201b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.889752] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1325.889752] env[62525]: value = "task-1780836" [ 1325.889752] env[62525]: _type = "Task" [ 1325.889752] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.899775] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780836, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.903257] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1325.980590] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780833, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.012972} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.980876] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1325.981114] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1325.981375] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b46f7e84-45f1-4440-97c5-767c2e18b626 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.990462] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1325.990462] env[62525]: value = "task-1780837" [ 1325.990462] env[62525]: _type = "Task" [ 1325.990462] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.002650] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780837, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.181446] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5217f271-98d6-7b28-96fe-60388d53740d, 'name': SearchDatastore_Task, 'duration_secs': 0.128247} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.181757] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.182027] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1326.182268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.182411] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.182584] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.183249] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a6c1228-71cb-46ca-9604-f42ee444fb1c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.200170] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.200412] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1326.203906] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e43f3ee8-7074-44cc-8e21-2bfce51c17eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.211048] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1326.211048] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52847437-9a30-c415-dc72-ddc7d3688736" [ 1326.211048] env[62525]: _type = "Task" [ 1326.211048] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.225501] env[62525]: DEBUG nova.network.neutron [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Updated VIF entry in instance network info cache for port 3030b0dc-e404-441e-ab9d-2a05ca9d68e2. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1326.225924] env[62525]: DEBUG nova.network.neutron [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Updating instance_info_cache with network_info: [{"id": "3030b0dc-e404-441e-ab9d-2a05ca9d68e2", "address": "fa:16:3e:29:6e:74", "network": {"id": "54c72c88-a0f1-4cd0-868a-abf71987b54a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1044062588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc29658da6d8472b8dce5699cef37ed0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3030b0dc-e4", "ovs_interfaceid": "3030b0dc-e404-441e-ab9d-2a05ca9d68e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.235257] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52847437-9a30-c415-dc72-ddc7d3688736, 'name': SearchDatastore_Task, 'duration_secs': 0.014235} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.235783] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6894bded-3263-4c6f-801a-ebcf61acba48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.243723] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1326.243723] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a7f577-d5ba-bf9f-466b-f4424bdc5c92" [ 1326.243723] env[62525]: _type = "Task" [ 1326.243723] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.257142] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a7f577-d5ba-bf9f-466b-f4424bdc5c92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.342696] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780835, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.405653] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780836, 'name': ReconfigVM_Task, 'duration_secs': 0.361558} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.409614] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Reconfigured VM instance instance-00000012 to attach disk [datastore1] b6bdc187-a266-4f7d-a9e4-85cb100cf4bf/b6bdc187-a266-4f7d-a9e4-85cb100cf4bf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1326.410487] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48a3828a-2441-4c17-8226-a119ae43675a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.417836] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1326.417836] env[62525]: value = "task-1780838" [ 1326.417836] env[62525]: _type = "Task" [ 1326.417836] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.431061] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780838, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.493701] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c32f38-8764-4b23-85b2-1c53330d21fb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.507788] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070774} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.508497] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1326.509527] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a39aa9-0db2-4c8c-a974-a608ad07b9e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.514692] env[62525]: DEBUG nova.network.neutron [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Updating instance_info_cache with network_info: [{"id": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "address": "fa:16:3e:9a:97:9c", "network": {"id": "d3852c70-9b8b-4c78-a2f1-f7f9e42d51cd", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-733437359-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "557d744dc35943aab165225698db81bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce676e95-1f", "ovs_interfaceid": "ce676e95-1fd5-4abf-9228-aa35cc8606e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.516717] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549935b7-c0c8-4fb5-b45d-4c8fb5e0ea29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.541129] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Reconfiguring VM instance instance-00000014 to 
attach disk [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.573357] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9459ffa6-6b9b-4f4e-9f99-b3a4baee2610 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.589598] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cae6659-f4e8-4873-8fe9-21b051acebfc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.599418] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a379c89f-ec74-4e17-b591-3b311147b3c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.347217] env[62525]: DEBUG nova.network.neutron [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Successfully updated port: 1a9d7127-13cf-4940-83b3-8cecf0ea3a03 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1327.349910] env[62525]: DEBUG oslo_concurrency.lockutils [req-9895f721-ccc5-4279-991b-4c7840f96520 req-4376823d-5dca-41f1-b562-9401c053b4b1 service nova] Releasing lock "refresh_cache-35a2e221-e1c5-49d9-af93-5e5f28c62b8f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.350655] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Releasing lock "refresh_cache-98334a1b-1a73-408f-93a4-6dc72764ebfc" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.354773] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1327.354773] env[62525]: value = "task-1780839" [ 1327.354773] env[62525]: _type = "Task" [ 1327.354773] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.369027] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "3455a540-7fbc-46ba-b7d6-84a345c0463e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.369027] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.382509] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1327.391888] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a7f577-d5ba-bf9f-466b-f4424bdc5c92, 'name': SearchDatastore_Task, 'duration_secs': 0.016145} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.392366] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780835, 'name': CreateVM_Task, 'duration_secs': 0.658146} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.397121] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.397417] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 35a2e221-e1c5-49d9-af93-5e5f28c62b8f/35a2e221-e1c5-49d9-af93-5e5f28c62b8f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1327.397622] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1327.397866] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780838, 'name': Rename_Task, 'duration_secs': 0.161022} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.399264] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-065774da-adf3-4b7d-b5ce-8c78894ea962 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.402230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.402230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.402230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1327.402484] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Powering on the VM {{(pid=62525) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.405837] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1a88262-25ed-4a99-a72b-1c8f85cca46d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.407528] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7684a0b-6922-4d81-9e60-a3ea2b0d1c4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.408972] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780839, 'name': ReconfigVM_Task, 'duration_secs': 0.519932} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.410287] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1327.413668] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f68c98c0-7755-4873-9107-836573a6e1c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.415882] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1327.415882] env[62525]: value = "task-1780840" [ 1327.415882] env[62525]: _type = "Task" [ 1327.415882] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.417958] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1327.417958] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522720d9-d072-e21a-4cf1-0c4bbb682324" [ 1327.417958] env[62525]: _type = "Task" [ 1327.417958] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.418216] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1327.418216] env[62525]: value = "task-1780841" [ 1327.418216] env[62525]: _type = "Task" [ 1327.418216] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.422662] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1327.422662] env[62525]: value = "task-1780842" [ 1327.422662] env[62525]: _type = "Task" [ 1327.422662] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.436022] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.448805] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522720d9-d072-e21a-4cf1-0c4bbb682324, 'name': SearchDatastore_Task, 'duration_secs': 0.018961} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.449587] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780842, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.449962] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780841, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.450423] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.450519] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1327.450737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.450982] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.451186] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1327.451411] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75eb7722-3b3d-41af-9db2-8b2d109ae9f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.462054] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1327.462271] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1327.463063] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96c70f7f-2df4-42d2-9757-06ef702188d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.470801] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1327.470801] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d65c0b-add9-5bc7-dc9d-f3effe99ae85" [ 1327.470801] env[62525]: _type = "Task" [ 1327.470801] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.480261] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d65c0b-add9-5bc7-dc9d-f3effe99ae85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.495766] env[62525]: DEBUG nova.compute.manager [req-a9e92595-bacb-4212-876a-ba57438cdfcd req-c7236e76-1932-4ece-86b1-96a047111c5d service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Received event network-vif-plugged-1a9d7127-13cf-4940-83b3-8cecf0ea3a03 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1327.495980] env[62525]: DEBUG oslo_concurrency.lockutils [req-a9e92595-bacb-4212-876a-ba57438cdfcd req-c7236e76-1932-4ece-86b1-96a047111c5d service nova] Acquiring lock "d38bbd59-b40c-4965-b823-caefc93e2568-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.496193] env[62525]: DEBUG oslo_concurrency.lockutils [req-a9e92595-bacb-4212-876a-ba57438cdfcd req-c7236e76-1932-4ece-86b1-96a047111c5d service nova] Lock "d38bbd59-b40c-4965-b823-caefc93e2568-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.496427] env[62525]: DEBUG oslo_concurrency.lockutils [req-a9e92595-bacb-4212-876a-ba57438cdfcd req-c7236e76-1932-4ece-86b1-96a047111c5d service nova] Lock "d38bbd59-b40c-4965-b823-caefc93e2568-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.496600] env[62525]: DEBUG nova.compute.manager [req-a9e92595-bacb-4212-876a-ba57438cdfcd req-c7236e76-1932-4ece-86b1-96a047111c5d service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] No waiting events found dispatching network-vif-plugged-1a9d7127-13cf-4940-83b3-8cecf0ea3a03 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1327.496756] env[62525]: WARNING nova.compute.manager [req-a9e92595-bacb-4212-876a-ba57438cdfcd req-c7236e76-1932-4ece-86b1-96a047111c5d service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Received unexpected event 
network-vif-plugged-1a9d7127-13cf-4940-83b3-8cecf0ea3a03 for instance with vm_state building and task_state spawning. [ 1327.859955] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.859955] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "refresh_cache-d38bbd59-b40c-4965-b823-caefc93e2568" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.859955] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired lock "refresh_cache-d38bbd59-b40c-4965-b823-caefc93e2568" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.859955] env[62525]: DEBUG nova.network.neutron [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1327.859955] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad27541b-5d25-41c7-83ff-0e2886f45850 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.873038] env[62525]: DEBUG oslo_vmware.api [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1327.873038] env[62525]: value = "task-1780843" [ 1327.873038] env[62525]: _type = "Task" [ 1327.873038] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.888852] env[62525]: DEBUG oslo_vmware.api [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780843, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.927569] env[62525]: ERROR nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [req-ea56847c-47f4-4a49-b0e6-1ec99aa2a7be] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ea56847c-47f4-4a49-b0e6-1ec99aa2a7be"}]} [ 1327.943022] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780840, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.955241] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780841, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.955540] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780842, 'name': Rename_Task, 'duration_secs': 0.166311} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.956844] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1327.961517] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.961517] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-788e4f7a-5576-4b32-b9a0-8d0eeca031a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.972824] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1327.972824] env[62525]: value = "task-1780844" [ 1327.972824] env[62525]: _type = "Task" [ 1327.972824] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.982228] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1327.982474] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1327.991537] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d65c0b-add9-5bc7-dc9d-f3effe99ae85, 'name': SearchDatastore_Task, 'duration_secs': 0.011993} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.001044] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780844, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.001044] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1328.003793] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec684545-4df5-487b-be43-b76da9e57c3b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.015149] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1328.015149] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cfe6a5-af5a-7591-e99d-c9a6bbfd0ac4" [ 1328.015149] env[62525]: _type = "Task" [ 1328.015149] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.029456] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cfe6a5-af5a-7591-e99d-c9a6bbfd0ac4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.039464] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1328.281795] env[62525]: DEBUG nova.compute.manager [None req-24e73bb2-cd65-4f73-b49d-f37adf9eb9b0 tempest-ServerDiagnosticsV248Test-885257089 tempest-ServerDiagnosticsV248Test-885257089-project-admin] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1328.283374] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41872e4a-2507-4622-b812-eb549bb44103 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.292499] env[62525]: INFO nova.compute.manager [None req-24e73bb2-cd65-4f73-b49d-f37adf9eb9b0 tempest-ServerDiagnosticsV248Test-885257089 tempest-ServerDiagnosticsV248Test-885257089-project-admin] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Retrieving diagnostics [ 1328.295566] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6c2043-4d77-49dd-88c5-775f3417792c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.384369] env[62525]: DEBUG oslo_vmware.api [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780843, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.433094] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652915} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.436527] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 35a2e221-e1c5-49d9-af93-5e5f28c62b8f/35a2e221-e1c5-49d9-af93-5e5f28c62b8f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1328.436844] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1328.439527] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e237cde5-bfeb-46da-95c5-b4dbb3e2e970 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.449497] env[62525]: DEBUG oslo_vmware.api [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780841, 'name': PowerOnVM_Task, 'duration_secs': 0.561625} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.451278] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1328.451625] env[62525]: INFO nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Took 11.40 seconds to spawn the instance on the hypervisor. [ 1328.452345] env[62525]: DEBUG nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1328.452345] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1328.452345] env[62525]: value = "task-1780845" [ 1328.452345] env[62525]: _type = "Task" [ 1328.452345] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.453139] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0280c8-d2ae-4b2b-9985-806c46495f23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.466945] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780845, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.475628] env[62525]: DEBUG nova.network.neutron [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1328.491771] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780844, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.524151] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cfe6a5-af5a-7591-e99d-c9a6bbfd0ac4, 'name': SearchDatastore_Task, 'duration_secs': 0.029189} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.529016] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.529420] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1328.530909] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d4fb699-3fca-45d4-bf5f-471455a704e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.540289] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1328.540289] env[62525]: value = "task-1780846" [ 1328.540289] env[62525]: _type = "Task" [ 1328.540289] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.553188] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.702802] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d0b015-6a6d-465f-8405-98ae663ab901 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.714466] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d15623-ea62-404b-bf3c-d1402b1d26fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.762182] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b11891-e14c-4e19-97d6-3b258899bd7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.772346] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761d274d-5f1c-4896-a01e-96f42289181c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.789351] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1328.886366] env[62525]: DEBUG oslo_vmware.api [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780843, 'name': PowerOnVM_Task, 'duration_secs': 0.835158} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.886679] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1328.886872] env[62525]: DEBUG nova.compute.manager [None req-f6f75cf2-d6c4-49d3-a21c-5d2b3530db90 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1328.887840] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456c0035-8988-4489-a9e7-f54452a25303 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.974389] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780845, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07249} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.974493] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1328.975576] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713c0aab-5aa6-4512-ae80-74c1cb95d60a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.986798] env[62525]: INFO nova.compute.manager [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Took 27.21 seconds to build instance. [ 1329.017504] env[62525]: DEBUG oslo_vmware.api [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780844, 'name': PowerOnVM_Task, 'duration_secs': 0.735497} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.026248] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 35a2e221-e1c5-49d9-af93-5e5f28c62b8f/35a2e221-e1c5-49d9-af93-5e5f28c62b8f.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1329.026667] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1329.026886] env[62525]: INFO nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Took 6.34 seconds to spawn the instance on the hypervisor. [ 1329.027105] env[62525]: DEBUG nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1329.027674] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a11a28b-b3c8-4c8c-9004-383d86b461f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.044040] env[62525]: DEBUG nova.network.neutron [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Updating instance_info_cache with network_info: [{"id": "1a9d7127-13cf-4940-83b3-8cecf0ea3a03", "address": "fa:16:3e:c6:72:44", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a9d7127-13", "ovs_interfaceid": "1a9d7127-13cf-4940-83b3-8cecf0ea3a03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.048121] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0fadecd0-b679-4943-97ca-b7f5a4d59eda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.071537] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780846, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.071735] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1329.071735] env[62525]: value = "task-1780847" [ 1329.071735] env[62525]: _type = "Task" [ 1329.071735] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.084108] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780847, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.328052] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1329.328344] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 42 to 43 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1329.328755] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1329.491363] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a801904f-8aa3-48e3-9f8a-febce02d436e tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock 
"b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.245s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.552439] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Releasing lock "refresh_cache-d38bbd59-b40c-4965-b823-caefc93e2568" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.552875] env[62525]: DEBUG nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Instance network_info: |[{"id": "1a9d7127-13cf-4940-83b3-8cecf0ea3a03", "address": "fa:16:3e:c6:72:44", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a9d7127-13", "ovs_interfaceid": "1a9d7127-13cf-4940-83b3-8cecf0ea3a03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1329.557485] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:72:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a9d7127-13cf-4940-83b3-8cecf0ea3a03', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.567871] env[62525]: DEBUG oslo.service.loopingcall [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.567962] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.568729] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a6a6738-45a8-461e-b5cb-86cfd83da689 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.592524] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726712} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.598220] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1329.598594] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1329.599221] env[62525]: INFO nova.compute.manager [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Took 26.51 seconds to build instance. [ 1329.600264] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd28a285-bc9f-41f1-adaf-99aa68aab6b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.604816] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.604816] env[62525]: value = "task-1780848" [ 1329.604816] env[62525]: _type = "Task" [ 1329.604816] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.612848] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1329.612848] env[62525]: value = "task-1780849" [ 1329.612848] env[62525]: _type = "Task" [ 1329.612848] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.613246] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780847, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.620265] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780848, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.626162] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780849, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.652634] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.653214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.653214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.653549] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.653627] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.657887] env[62525]: INFO nova.compute.manager [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Terminating instance [ 1329.659721] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock 
"refresh_cache-30fbab3d-8141-4d7e-987f-e4f4fc4a1808" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.659879] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquired lock "refresh_cache-30fbab3d-8141-4d7e-987f-e4f4fc4a1808" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.660054] env[62525]: DEBUG nova.network.neutron [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1329.833355] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.425s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.834132] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1329.837162] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.803s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.839118] env[62525]: INFO nova.compute.claims [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1329.860139] env[62525]: DEBUG nova.compute.manager [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Received event network-changed-1a9d7127-13cf-4940-83b3-8cecf0ea3a03 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1329.860904] env[62525]: DEBUG nova.compute.manager [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Refreshing instance network info cache due to event network-changed-1a9d7127-13cf-4940-83b3-8cecf0ea3a03. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1329.860904] env[62525]: DEBUG oslo_concurrency.lockutils [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] Acquiring lock "refresh_cache-d38bbd59-b40c-4965-b823-caefc93e2568" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.861136] env[62525]: DEBUG oslo_concurrency.lockutils [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] Acquired lock "refresh_cache-d38bbd59-b40c-4965-b823-caefc93e2568" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.861447] env[62525]: DEBUG nova.network.neutron [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Refreshing network info cache for port 1a9d7127-13cf-4940-83b3-8cecf0ea3a03 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1329.995770] env[62525]: DEBUG nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1330.108477] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cc07fa46-19a6-4b7f-9fd2-db31d272ba5d tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "8b41bff7-137f-489c-bb88-7487eb8e97cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.821s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.108901] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780847, 'name': ReconfigVM_Task, 'duration_secs': 1.026957} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.114015] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 35a2e221-e1c5-49d9-af93-5e5f28c62b8f/35a2e221-e1c5-49d9-af93-5e5f28c62b8f.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1330.114812] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97bd9cd5-2059-4180-adde-bdb09665aac7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.125844] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780848, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.134031] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780849, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07185} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.135053] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1330.135376] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1330.135376] env[62525]: value = "task-1780850" [ 1330.135376] env[62525]: _type = "Task" [ 1330.135376] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.136169] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552d6e3c-62c6-41a2-9024-122b90e15974 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.164295] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.170514] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3634e772-23a1-46ab-b444-ab2e6231aa24 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.185714] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780850, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.192444] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1330.192444] env[62525]: value = "task-1780851" [ 1330.192444] env[62525]: _type = "Task" [ 1330.192444] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.203384] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780851, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.309733] env[62525]: DEBUG nova.network.neutron [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1330.343680] env[62525]: DEBUG nova.compute.utils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1330.347289] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1330.347619] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1330.475675] env[62525]: DEBUG nova.network.neutron [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.486251] env[62525]: DEBUG nova.policy [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62d1b3275d194480ab34f8d437934dcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f95b8120cae4ff68fff82bf8e933c24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1330.527101] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.616942] env[62525]: DEBUG nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1330.627424] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780848, 'name': CreateVM_Task, 'duration_secs': 0.608735} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.627424] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1330.627578] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.627729] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.628031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1330.628992] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb037e2-9477-4ab5-8e3a-665a04093d14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.634651] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1330.634651] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526631bb-c1dd-fe74-684e-052c927376f4" [ 1330.634651] env[62525]: _type = "Task" [ 1330.634651] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.650933] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526631bb-c1dd-fe74-684e-052c927376f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.655338] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780850, 'name': Rename_Task, 'duration_secs': 0.33641} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.655338] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1330.655662] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ded0780-7d4f-44ef-8050-1875cb96eee5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.668364] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1330.668364] env[62525]: value = "task-1780852" [ 1330.668364] env[62525]: _type = "Task" [ 1330.668364] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.677821] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780852, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.706971] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780851, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.849172] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1330.977945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Releasing lock "refresh_cache-30fbab3d-8141-4d7e-987f-e4f4fc4a1808" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.978414] env[62525]: DEBUG nova.compute.manager [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1330.978611] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1330.979560] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10122d0f-5246-4199-a506-fef194072486 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.994884] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1330.994884] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00f3a2f6-0a1e-4bc6-9ad3-878c6089dc8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.007651] env[62525]: DEBUG oslo_vmware.api [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1331.007651] env[62525]: value = "task-1780853" [ 1331.007651] env[62525]: _type = "Task" [ 1331.007651] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.023404] env[62525]: DEBUG nova.network.neutron [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Updated VIF entry in instance network info cache for port 1a9d7127-13cf-4940-83b3-8cecf0ea3a03. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1331.023756] env[62525]: DEBUG nova.network.neutron [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Updating instance_info_cache with network_info: [{"id": "1a9d7127-13cf-4940-83b3-8cecf0ea3a03", "address": "fa:16:3e:c6:72:44", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a9d7127-13", "ovs_interfaceid": "1a9d7127-13cf-4940-83b3-8cecf0ea3a03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.025083] env[62525]: DEBUG oslo_vmware.api [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780853, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.083126] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Successfully created port: cf53c0bd-6b6d-4e88-b23e-60c86646c0be {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1331.153832] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526631bb-c1dd-fe74-684e-052c927376f4, 'name': SearchDatastore_Task, 'duration_secs': 0.014071} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.157344] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.157825] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.158267] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.158576] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.159768] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.161230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.162018] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f27f9d6-f38d-4ce4-9e7e-37b0c8538ab0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.175344] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.175344] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.183924] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89cb8f3b-dc0a-4550-97a7-c843238b069b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.187556] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780852, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.199905] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1331.199905] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523431e5-8e99-0d9b-07bd-c2b27250677b" [ 1331.199905] env[62525]: _type = "Task" [ 1331.199905] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.210939] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780851, 'name': ReconfigVM_Task, 'duration_secs': 0.583156} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.212155] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Reconfigured VM instance instance-00000011 to attach disk [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.213483] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08b28537-fd73-44f8-a2d0-3d9a6d5356f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.219537] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523431e5-8e99-0d9b-07bd-c2b27250677b, 'name': SearchDatastore_Task, 'duration_secs': 0.014719} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.221294] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b300e97d-a615-4034-9d39-3e4c2820862c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.233136] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1331.233136] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cfc0e6-0d76-9c36-7652-f1b9e5ab1c46" [ 1331.233136] env[62525]: _type = "Task" [ 1331.233136] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.233882] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1331.233882] env[62525]: value = "task-1780854" [ 1331.233882] env[62525]: _type = "Task" [ 1331.233882] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.248428] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780854, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.252408] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cfc0e6-0d76-9c36-7652-f1b9e5ab1c46, 'name': SearchDatastore_Task, 'duration_secs': 0.013867} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.255598] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.256058] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] d38bbd59-b40c-4965-b823-caefc93e2568/d38bbd59-b40c-4965-b823-caefc93e2568.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1331.257664] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-246ae144-bdf2-499d-bf5f-41c8f39a8803 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.265550] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1331.265550] env[62525]: value = "task-1780855" [ 1331.265550] env[62525]: _type = "Task" [ 1331.265550] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.278429] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780855, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.372793] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.373087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.515092] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66146c93-2bb6-4c7a-b847-4eed106099ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.521987] env[62525]: DEBUG oslo_vmware.api [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780853, 'name': PowerOffVM_Task, 'duration_secs': 0.340724} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.522730] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1331.522911] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1331.523241] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68bc6dc0-6c8b-44eb-8b63-7566aa57d93e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.527300] env[62525]: DEBUG oslo_concurrency.lockutils [req-d52eea71-61e5-42ca-9ebf-93fa9918e5f7 req-d740b519-70f4-45b0-be2d-cbff6ac287b8 service nova] Releasing lock "refresh_cache-d38bbd59-b40c-4965-b823-caefc93e2568" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.528540] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99105eb3-b9c3-43cf-a62f-8475f4447c73 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.571038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519cb034-9f4b-4411-a0c4-727e370a99e0 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.573994] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1331.574203] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1331.575268] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Deleting the datastore file [datastore1] 30fbab3d-8141-4d7e-987f-e4f4fc4a1808 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1331.575268] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c5b765d-64c9-4672-9b09-9ba8c2552142 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.586208] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e113ab03-31d9-4314-bf94-54d36db1f742 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.590689] env[62525]: DEBUG oslo_vmware.api [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for the task: (returnval){ [ 1331.590689] env[62525]: value = "task-1780857" [ 1331.590689] env[62525]: _type = "Task" [ 1331.590689] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.603198] env[62525]: DEBUG nova.compute.provider_tree [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.613027] env[62525]: DEBUG oslo_vmware.api [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.684035] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780852, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.753676] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780854, 'name': Rename_Task, 'duration_secs': 0.189108} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.754039] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1331.755332] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc92abce-f5d9-4fba-bb51-2b86b80efc04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.766924] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1331.766924] env[62525]: value = "task-1780858" [ 1331.766924] env[62525]: _type = "Task" [ 1331.766924] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.781079] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780855, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.788530] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780858, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.873366] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1331.876869] env[62525]: INFO nova.compute.manager [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Rebuilding instance [ 1331.917112] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1331.917112] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1331.917112] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1331.917441] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1331.917441] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1331.917441] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1331.917441] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1331.917441] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 
tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1331.917717] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1331.917717] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1331.917717] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1331.919446] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7960c3ee-05b5-486d-ad37-72b13e5618b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.929424] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e745bf9-736e-47b7-b572-eec857075763 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.963287] env[62525]: DEBUG nova.compute.manager [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1331.963778] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e797bb5f-03fe-4b69-bcaa-df8327394018 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.105230] env[62525]: DEBUG oslo_vmware.api [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Task: {'id': task-1780857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401946} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.105230] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1332.105230] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1332.105461] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1332.105687] env[62525]: INFO nova.compute.manager [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1332.105815] env[62525]: DEBUG oslo.service.loopingcall [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1332.106816] env[62525]: DEBUG nova.scheduler.client.report [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1332.109979] env[62525]: DEBUG nova.compute.manager [-] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1332.111015] env[62525]: DEBUG nova.network.neutron [-] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1332.176016] env[62525]: DEBUG nova.network.neutron [-] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1332.183404] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780852, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.291327] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780858, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.298020] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640881} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.298020] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] d38bbd59-b40c-4965-b823-caefc93e2568/d38bbd59-b40c-4965-b823-caefc93e2568.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.298020] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.298382] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89cbfb9c-dda4-44b6-b325-5163cf2dc66c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.308315] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1332.308315] env[62525]: value = "task-1780859" [ 1332.308315] env[62525]: _type = "Task" [ 1332.308315] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.323068] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780859, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.482082] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1332.482214] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64afe326-fb45-48d5-a6c2-6e0dd5aaf683 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.496034] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1332.496034] env[62525]: value = "task-1780860" [ 1332.496034] env[62525]: _type = "Task" [ 1332.496034] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.504477] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780860, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.614065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.777s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.614668] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1332.625034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.158s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.625313] env[62525]: DEBUG nova.objects.instance [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lazy-loading 'resources' on Instance uuid 8c6e22d6-353f-4be5-8400-7fe38a9bee25 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.681027] env[62525]: DEBUG nova.network.neutron [-] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.683331] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780852, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.779693] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780858, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.823021] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074988} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.830444] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1332.830444] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fde7f19-fe39-467d-a94d-8cc5a7849602 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.855413] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] d38bbd59-b40c-4965-b823-caefc93e2568/d38bbd59-b40c-4965-b823-caefc93e2568.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1332.855747] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c442313-9bf7-4d8b-aa74-5ea8ea533626 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.877422] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1332.877422] env[62525]: value = "task-1780861" [ 1332.877422] env[62525]: _type = "Task" [ 1332.877422] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.889336] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780861, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.008368] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780860, 'name': PowerOffVM_Task, 'duration_secs': 0.465034} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.008368] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1333.008368] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1333.009038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20dd359f-5937-4005-b173-59c597fffcdc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.022294] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1333.022599] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba3833b0-d246-4526-a11f-88c9bd165f19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.061871] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1333.062170] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1333.062375] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleting the datastore file [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1333.062668] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d08b7655-e120-43f0-a603-cfbe5879d595 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.076864] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1333.076864] env[62525]: value = "task-1780863" [ 1333.076864] env[62525]: _type = "Task" [ 1333.076864] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.088273] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.127581] env[62525]: DEBUG nova.compute.utils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1333.129388] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1333.129602] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1333.182493] env[62525]: DEBUG oslo_vmware.api [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780852, 'name': PowerOnVM_Task, 'duration_secs': 2.157866} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.185890] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1333.186132] env[62525]: INFO nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Took 13.34 seconds to spawn the instance on the hypervisor. [ 1333.186420] env[62525]: DEBUG nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1333.186757] env[62525]: INFO nova.compute.manager [-] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Took 1.08 seconds to deallocate network for instance. 
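[editor's note] The repeated `Waiting for the task: (returnval){ ... } to complete` blocks followed by `_poll_task ... progress is N%` entries above come from oslo.vmware's task-polling loop. A minimal sketch of that pattern follows; the vCenter address, credentials and the VM reference are placeholders and are not taken from this log.

```python
# Illustrative sketch only (not part of this log): start a vSphere task and
# poll it with oslo.vmware, as the PowerOnVM_Task entries above do.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',          # placeholder host/credentials
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # normally found via PropertyCollector queries; assumed here

# invoke_api() starts the task; wait_for_task() blocks and emits the
# periodic "_poll_task ... progress is N%" DEBUG lines seen in the log.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```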
[ 1333.187778] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a784aecd-9246-4f44-a19e-123938a3957a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.241714] env[62525]: DEBUG nova.policy [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62d1b3275d194480ab34f8d437934dcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f95b8120cae4ff68fff82bf8e933c24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1333.281083] env[62525]: DEBUG oslo_vmware.api [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780858, 'name': PowerOnVM_Task, 'duration_secs': 1.210303} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.283834] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1333.284221] env[62525]: DEBUG nova.compute.manager [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1333.285254] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924d5fe5-a863-4c98-bc45-45fef51af37c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.356901] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Successfully updated port: cf53c0bd-6b6d-4e88-b23e-60c86646c0be {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1333.390746] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780861, 'name': ReconfigVM_Task, 'duration_secs': 0.338579} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.393519] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Reconfigured VM instance instance-00000015 to attach disk [datastore1] d38bbd59-b40c-4965-b823-caefc93e2568/d38bbd59-b40c-4965-b823-caefc93e2568.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1333.394339] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b01598bf-5813-471f-932c-78a70cd63b20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.402319] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1333.402319] env[62525]: value = "task-1780864" [ 1333.402319] env[62525]: _type = "Task" [ 1333.402319] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.413369] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780864, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.587122] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.476175} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.587389] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1333.587582] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1333.587744] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1333.632348] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1333.704847] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.715230] env[62525]: INFO nova.compute.manager [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Took 31.55 seconds to build instance. [ 1333.736455] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f076b3b2-196c-4de0-82ea-7cf21ba860b5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.748516] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bb6ce9-1643-44e2-be7e-0337e74435c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.787380] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64262749-7fc3-4c2b-b8c6-50a79d3e56b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.795969] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f659b11c-6438-4d1f-807c-da90cdf16fce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.813734] env[62525]: DEBUG nova.compute.provider_tree [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.816843] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.825860] env[62525]: DEBUG nova.compute.manager [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.826099] env[62525]: DEBUG nova.compute.manager [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing instance network info cache due to event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1333.826267] env[62525]: DEBUG oslo_concurrency.lockutils [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] Acquiring lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.826428] env[62525]: DEBUG oslo_concurrency.lockutils [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] Acquired lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.826599] env[62525]: DEBUG nova.network.neutron [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1333.862669] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "refresh_cache-1277dac8-3a23-4de8-93c7-c967b0eaf6ba" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.862825] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "refresh_cache-1277dac8-3a23-4de8-93c7-c967b0eaf6ba" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.862981] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1333.913634] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780864, 'name': Rename_Task, 'duration_secs': 0.256925} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.914500] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1333.914798] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0c80d3c-2515-4a07-ab84-f5c1d9bdade9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.924104] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1333.924104] env[62525]: value = "task-1780865" [ 1333.924104] env[62525]: _type = "Task" [ 1333.924104] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.933179] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.950647] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Successfully created port: a9e5d810-d294-43e0-8f03-aaf8ca59d0dd {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1334.220028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4aa2dfb5-9235-477f-92ba-b4904e98029d tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.026s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.322017] env[62525]: DEBUG nova.scheduler.client.report [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1334.441635] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780865, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.450819] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1334.623048] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1334.623296] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1334.623455] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1334.623640] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1334.623785] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1334.623929] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1334.624294] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1334.624500] env[62525]: DEBUG nova.virt.hardware 
[None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1334.624690] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1334.624854] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1334.625034] env[62525]: DEBUG nova.virt.hardware [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1334.625881] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a78424-6622-4689-86af-185aa96cbdd3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.634827] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc860817-6e1c-4b57-95e4-6690aa3dcfa5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.650293] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1334.652458] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1334.658597] env[62525]: DEBUG oslo.service.loopingcall [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1334.660784] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1334.660784] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b885d2fb-6ff9-447e-82e9-9c03f13dc0f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.682269] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1334.682397] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1334.682526] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1334.682710] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1334.682862] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1334.682992] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1334.683264] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1334.683431] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1334.683598] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1334.683756] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1334.683924] env[62525]: DEBUG nova.virt.hardware [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1334.684759] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7b0832-17c3-4ea4-8900-3f3ae9486081 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.690963] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1334.690963] env[62525]: value = "task-1780866" [ 1334.690963] env[62525]: _type = "Task" [ 1334.690963] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.700447] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cffae1a-0edd-412b-8dfb-33dd5301550f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.712300] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780866, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.722380] env[62525]: DEBUG nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1334.827492] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.202s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.830332] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 21.057s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.830551] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.831342] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1334.831342] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.734s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.834085] env[62525]: INFO nova.compute.claims [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1334.839207] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82886d9e-37ea-4d6a-ac60-6a5a7ea6ba91 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.845505] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2060e836-b2ee-469f-b94a-4a4b4b0d42c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.867292] env[62525]: INFO nova.scheduler.client.report [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Deleted allocations for instance 8c6e22d6-353f-4be5-8400-7fe38a9bee25 [ 1334.874900] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae48b25-0728-437a-8ada-ccb2fcf3c20e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.882530] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca5e44d-1ca2-4cf4-ad6c-856ea632e254 {{(pid=62525) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.920389] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180542MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1334.920613] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.934842] env[62525]: DEBUG oslo_vmware.api [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780865, 'name': PowerOnVM_Task, 'duration_secs': 0.541716} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.935129] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1334.935527] env[62525]: INFO nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Took 9.48 seconds to spawn the instance on the hypervisor. 
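[editor's note] The `Acquiring lock "compute_resources"` / `acquired ... waited Ns` / `"released" ... held Ns` triplets around the resource tracker are produced by oslo.concurrency's lockutils wrapper (the `inner .../lockutils.py` frames above). A minimal sketch of that locking pattern, assuming debug logging is configured; the lock name matches the log, the function body is a placeholder rather than Nova's actual resource-tracker code.

```python
# Illustrative sketch only: oslo.concurrency in-process locking as seen in
# the "compute_resources" lock messages.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # critical section: in Nova this is where per-node usage and inventory
    # (VCPU / MEMORY_MB / DISK_GB) would be updated under the lock
    pass

# Calling the decorated function produces the "Acquiring lock ... acquired
# ... released" DEBUG lines when oslo logging is enabled.
update_usage()
```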
[ 1334.935790] env[62525]: DEBUG nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1334.936839] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15e2db3-d4e5-4bb4-a709-521cce11a40d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.010895] env[62525]: DEBUG nova.compute.manager [req-c23e500f-cb89-4520-88b7-2aaaa0036aab req-62ad817a-108f-413f-87e1-efa8cd00376e service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Received event network-vif-plugged-cf53c0bd-6b6d-4e88-b23e-60c86646c0be {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1335.011447] env[62525]: DEBUG oslo_concurrency.lockutils [req-c23e500f-cb89-4520-88b7-2aaaa0036aab req-62ad817a-108f-413f-87e1-efa8cd00376e service nova] Acquiring lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.011680] env[62525]: DEBUG oslo_concurrency.lockutils [req-c23e500f-cb89-4520-88b7-2aaaa0036aab req-62ad817a-108f-413f-87e1-efa8cd00376e service nova] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.011846] env[62525]: DEBUG oslo_concurrency.lockutils [req-c23e500f-cb89-4520-88b7-2aaaa0036aab req-62ad817a-108f-413f-87e1-efa8cd00376e service nova] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.012034] env[62525]: DEBUG nova.compute.manager [req-c23e500f-cb89-4520-88b7-2aaaa0036aab req-62ad817a-108f-413f-87e1-efa8cd00376e service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] No waiting events found dispatching network-vif-plugged-cf53c0bd-6b6d-4e88-b23e-60c86646c0be {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1335.012248] env[62525]: WARNING nova.compute.manager [req-c23e500f-cb89-4520-88b7-2aaaa0036aab req-62ad817a-108f-413f-87e1-efa8cd00376e service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Received unexpected event network-vif-plugged-cf53c0bd-6b6d-4e88-b23e-60c86646c0be for instance with vm_state building and task_state spawning. 
[ 1335.107980] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Updating instance_info_cache with network_info: [{"id": "cf53c0bd-6b6d-4e88-b23e-60c86646c0be", "address": "fa:16:3e:20:a2:c9", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf53c0bd-6b", "ovs_interfaceid": "cf53c0bd-6b6d-4e88-b23e-60c86646c0be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.191869] env[62525]: DEBUG nova.network.neutron [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updated VIF entry in instance network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.192282] env[62525]: DEBUG nova.network.neutron [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updating instance_info_cache with network_info: [{"id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "address": "fa:16:3e:d0:8d:de", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1790239c-c6", "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.209546] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780866, 'name': CreateVM_Task, 'duration_secs': 0.351805} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.209546] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1335.209546] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.209546] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.209789] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1335.210047] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687fc456-de77-48fb-ac44-435c14c94e39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.219233] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1335.219233] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d12df9-8484-e3a3-b76c-43219ff8dcfc" [ 1335.219233] env[62525]: _type = "Task" [ 1335.219233] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.228814] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d12df9-8484-e3a3-b76c-43219ff8dcfc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.254492] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.381367] env[62525]: DEBUG oslo_concurrency.lockutils [None req-da037225-6b37-45bd-b593-c7f99b33ef9c tempest-ServerAddressesTestJSON-1031794760 tempest-ServerAddressesTestJSON-1031794760-project-member] Lock "8c6e22d6-353f-4be5-8400-7fe38a9bee25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.627s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.461776] env[62525]: INFO nova.compute.manager [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Took 31.83 seconds to build instance. [ 1335.619501] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "refresh_cache-1277dac8-3a23-4de8-93c7-c967b0eaf6ba" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.619501] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Instance network_info: |[{"id": "cf53c0bd-6b6d-4e88-b23e-60c86646c0be", "address": "fa:16:3e:20:a2:c9", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf53c0bd-6b", "ovs_interfaceid": "cf53c0bd-6b6d-4e88-b23e-60c86646c0be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1335.620184] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:a2:c9', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf53c0bd-6b6d-4e88-b23e-60c86646c0be', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.636290] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Creating folder: Project (8f95b8120cae4ff68fff82bf8e933c24). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.636662] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9497d56-4e4c-4784-ac86-c1469c3514d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.650059] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Created folder: Project (8f95b8120cae4ff68fff82bf8e933c24) in parent group-v369553. [ 1335.650283] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Creating folder: Instances. Parent ref: group-v369611. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.650564] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e37297c-37c6-440d-9ed3-1f728d326990 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.662033] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Created folder: Instances in parent group-v369611. [ 1335.662033] env[62525]: DEBUG oslo.service.loopingcall [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.662033] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1335.662033] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be9a41e8-72f3-4ec9-848d-015e55739430 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.692526] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.692526] env[62525]: value = "task-1780869" [ 1335.692526] env[62525]: _type = "Task" [ 1335.692526] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.693087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.693455] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.700938] env[62525]: DEBUG oslo_concurrency.lockutils [req-841dae97-61fe-40a4-a12f-d6600aba6260 req-f60d69f1-f2f9-482c-a1b4-99f0e65de177 service nova] Releasing lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.708244] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780869, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.735343] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d12df9-8484-e3a3-b76c-43219ff8dcfc, 'name': SearchDatastore_Task, 'duration_secs': 0.012443} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.735848] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.736036] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1335.736836] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.736836] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.736950] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1335.740339] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1090ce5e-db18-4b64-a895-bb115428ce04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.753863] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1335.754280] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Folder [datastore1] devstack-image-cache_base created. 
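Access to the cached image vmdk above is serialized with oslo.concurrency locks keyed on the datastore path, and the cache directory is created before the datastore search. A hedged sketch of that combination; the lock name and paths mirror the log, the helper itself is illustrative (production code also tolerates "already exists" faults from MakeDirectory).

    from oslo_concurrency import lockutils

    def ensure_image_cache_dir(session, dc_ref,
                               cache_path='[datastore1] devstack-image-cache_base'):
        """Create the image-cache directory under a per-path lock."""
        # The lock name matches the "Acquiring/Acquired/Releasing lock"
        # entries above; concurrent builds that use the same cached image
        # serialize on it.
        with lockutils.lock(cache_path):
            file_manager = session.vim.service_content.fileManager
            # MakeDirectory is synchronous; createParentDirectories gives
            # "mkdir -p" behaviour.
            session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                               name=cache_path, datacenter=dc_ref,
                               createParentDirectories=True)
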
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1335.756285] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edfeaf68-6898-4da5-b5c1-6154a97ee1fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.767594] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1335.767594] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526136c4-0875-d682-98d8-cf74312d4834" [ 1335.767594] env[62525]: _type = "Task" [ 1335.767594] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.782260] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526136c4-0875-d682-98d8-cf74312d4834, 'name': SearchDatastore_Task, 'duration_secs': 0.011129} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.783796] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a59b44c-67d2-4980-889d-b8a36bc01396 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.791891] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1335.791891] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fd0726-e2f9-c898-557c-df2512d8e360" [ 1335.791891] env[62525]: _type = "Task" [ 1335.791891] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.806245] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd0726-e2f9-c898-557c-df2512d8e360, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.965249] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7fbbfd9d-f138-4417-a228-5dfad95893d6 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "d38bbd59-b40c-4965-b823-caefc93e2568" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.650s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.209772] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780869, 'name': CreateVM_Task} progress is 99%. 
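Every *_Task entry above follows the same shape: invoke the SOAP method, receive a Task managed object (the 'value = "task-..."' or 'value = "session[...]..."' blocks), then poll it until vCenter reports success. The "progress is N%" and "completed successfully" lines come from that poll loop. A generic sketch of the pattern, assuming an oslo.vmware session object; the real polling is oslo_vmware.api.VMwareAPISession.wait_for_task.

    def run_task(session, managed_object, method, *args, **kwargs):
        """Invoke a vSphere *_Task method and block until it completes."""
        task_ref = session.invoke_api(session.vim, method,
                                      managed_object, *args, **kwargs)
        # wait_for_task() re-reads task.info on an interval, logs progress,
        # and returns the completed task info, or raises if the task ends
        # in an error state.
        return session.wait_for_task(task_ref)
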
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.269838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "2f589dc1-9244-475f-86d0-4b69b511508b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.270096] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "2f589dc1-9244-475f-86d0-4b69b511508b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.310701] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd0726-e2f9-c898-557c-df2512d8e360, 'name': SearchDatastore_Task, 'duration_secs': 0.011661} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.310701] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.310701] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1336.310701] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80b4acd6-e5d1-4921-899f-49aca269d6bd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.317812] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1336.317812] env[62525]: value = "task-1780870" [ 1336.317812] env[62525]: _type = "Task" [ 1336.317812] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.334715] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780870, 'name': CopyVirtualDisk_Task} progress is 0%. 
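Once the cached image is found, it is copied from devstack-image-cache_base into the instance's own datastore directory with VirtualDiskManager.CopyVirtualDisk_Task, as logged above. Roughly, with oslo.vmware; the function and its parameters are a sketch, with the datacenter reference and vmdk paths passed in by the caller.

    def copy_cached_image(session, dc_ref, source_vmdk, dest_vmdk):
        """Copy a cached image vmdk to the instance's datastore folder."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr,
                                  sourceName=source_vmdk,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_vmdk)
        # For this image the CopyVirtualDisk_Task above completed in ~0.79s.
        return session.wait_for_task(task)
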
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.470285] env[62525]: DEBUG nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1336.522931] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026e7c48-c018-4274-8956-36606e6967a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.532769] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585b0c50-09bb-4be0-a009-f5dad3df4df4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.565255] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011e07c1-123c-4e26-8790-4ee73de6e1c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.573670] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44579ac5-e2fa-4c2f-8212-02057af1702c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.590208] env[62525]: DEBUG nova.compute.provider_tree [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.592356] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Successfully updated port: a9e5d810-d294-43e0-8f03-aaf8ca59d0dd {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1336.706200] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780869, 'name': CreateVM_Task, 'duration_secs': 0.605558} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.706578] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1336.707157] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.707354] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.707736] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1336.707995] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6218aa90-c21c-4d99-b3b1-6a2b384985df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.713129] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1336.713129] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528e0ad8-1746-a0dd-057a-6e5584137493" [ 1336.713129] env[62525]: _type = "Task" [ 1336.713129] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.730309] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528e0ad8-1746-a0dd-057a-6e5584137493, 'name': SearchDatastore_Task, 'duration_secs': 0.012391} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.730662] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.730941] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1336.731173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.731327] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.731521] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1336.731926] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51eb51a8-3385-41f1-b04d-2d08c1b4bf69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.740265] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1336.740722] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1336.741538] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b852791a-2e67-4cb9-a74d-9909e9fbb51e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.747979] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1336.747979] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ae4dc0-7f68-80bb-a501-296ee54ca6a1" [ 1336.747979] env[62525]: _type = "Task" [ 1336.747979] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.756755] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ae4dc0-7f68-80bb-a501-296ee54ca6a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.838745] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780870, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.002380] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.095349] env[62525]: DEBUG nova.scheduler.client.report [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.101169] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "refresh_cache-e3255df2-2de0-4668-ad7b-a864ea680b44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.101908] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "refresh_cache-e3255df2-2de0-4668-ad7b-a864ea680b44" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.101908] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1337.259226] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ae4dc0-7f68-80bb-a501-296ee54ca6a1, 'name': SearchDatastore_Task, 'duration_secs': 0.033852} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.260452] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fb3c306-94a3-4567-9648-ad5f8d33289d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.267094] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1337.267094] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52090a2e-c303-c7e4-d0ab-6118ae8280fd" [ 1337.267094] env[62525]: _type = "Task" [ 1337.267094] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.279376] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52090a2e-c303-c7e4-d0ab-6118ae8280fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.335780] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780870, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790078} completed successfully. 
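The scheduler report-client entry above compares the provider's inventory against the logged payload. Its shape is worth spelling out, since reserved and allocation_ratio, not total alone, determine what placement will grant; the values below are copied from the log entry for provider bb89c0ac-8f56-43c6-9f73-fd897be63424.

    # Usable capacity per resource class is (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                      'max_unit': 162,   'step_size': 1, 'allocation_ratio': 1.0},
    }
    # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable vCPUs on this node.
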
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.337085] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1337.337085] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1337.337085] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f23970fe-4b76-4976-b86b-954416c7f852 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.347494] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1337.347494] env[62525]: value = "task-1780871" [ 1337.347494] env[62525]: _type = "Task" [ 1337.347494] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.356821] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780871, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.606033] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.775s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.606602] env[62525]: DEBUG nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Start building networks asynchronously for instance. 
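After the copy, the root disk is grown to the flavor size; the 1048576 above is in KB, i.e. a 1 GiB root disk, and the subsequent ReconfigVM_Task attaches it to the VM. A hedged sketch of the extend call, again assuming an oslo.vmware session and a datacenter reference supplied by the caller.

    def extend_root_disk(session, dc_ref, vmdk_path, new_size_kb=1048576):
        """Grow a vmdk to new_size_kb (1048576 KB == 1 GiB, as logged)."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                  disk_mgr,
                                  name=vmdk_path,
                                  datacenter=dc_ref,
                                  newCapacityKb=new_size_kb,
                                  eagerZero=False)
        return session.wait_for_task(task)
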
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1337.613087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.368s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.617123] env[62525]: INFO nova.compute.claims [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1337.667877] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1337.782592] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52090a2e-c303-c7e4-d0ab-6118ae8280fd, 'name': SearchDatastore_Task, 'duration_secs': 0.01207} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.785732] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.785844] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1277dac8-3a23-4de8-93c7-c967b0eaf6ba/1277dac8-3a23-4de8-93c7-c967b0eaf6ba.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1337.786175] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc333544-35f4-4b8a-b082-936f1c7a53aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.794852] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1337.794852] env[62525]: value = "task-1780872" [ 1337.794852] env[62525]: _type = "Task" [ 1337.794852] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.804901] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780872, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.843968] env[62525]: DEBUG nova.compute.manager [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1337.845640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa05adf-1c8f-4023-a5a3-d5b06c2ea2f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.863921] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192131} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.864466] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1337.865244] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2853c7b-450b-4290-8de4-375a7d978a8e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.895584] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1337.900411] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bfd79a8-9273-4bb5-8b8f-3eb3af43b482 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.923838] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1337.923838] env[62525]: value = "task-1780873" [ 1337.923838] env[62525]: _type = "Task" [ 1337.923838] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.935230] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780873, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.014412] env[62525]: DEBUG nova.network.neutron [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Updating instance_info_cache with network_info: [{"id": "a9e5d810-d294-43e0-8f03-aaf8ca59d0dd", "address": "fa:16:3e:0d:52:38", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9e5d810-d2", "ovs_interfaceid": "a9e5d810-d294-43e0-8f03-aaf8ca59d0dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.070720] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080fd214-8602-41ee-a68a-b01717443e9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.083617] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2d19fc9b-a3a2-4b16-8def-0ea8eab0592d tempest-ServersAdminNegativeTestJSON-1382129599 tempest-ServersAdminNegativeTestJSON-1382129599-project-admin] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Suspending the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1338.083991] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-7fcbe0a9-f659-4309-afec-a6721c46e117 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.091357] env[62525]: DEBUG oslo_vmware.api [None req-2d19fc9b-a3a2-4b16-8def-0ea8eab0592d tempest-ServersAdminNegativeTestJSON-1382129599 tempest-ServersAdminNegativeTestJSON-1382129599-project-admin] Waiting for the task: (returnval){ [ 1338.091357] env[62525]: value = "task-1780874" [ 1338.091357] env[62525]: _type = "Task" [ 1338.091357] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.105723] env[62525]: DEBUG oslo_vmware.api [None req-2d19fc9b-a3a2-4b16-8def-0ea8eab0592d tempest-ServersAdminNegativeTestJSON-1382129599 tempest-ServersAdminNegativeTestJSON-1382129599-project-admin] Task: {'id': task-1780874, 'name': SuspendVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.114174] env[62525]: DEBUG nova.compute.utils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1338.115803] env[62525]: DEBUG nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1338.115861] env[62525]: DEBUG nova.network.neutron [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.142228] env[62525]: DEBUG nova.compute.manager [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1338.142470] env[62525]: DEBUG nova.compute.manager [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing instance network info cache due to event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1338.143332] env[62525]: DEBUG oslo_concurrency.lockutils [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] Acquiring lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1338.144045] env[62525]: DEBUG oslo_concurrency.lockutils [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] Acquired lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.144222] env[62525]: DEBUG nova.network.neutron [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1338.190824] env[62525]: DEBUG nova.policy [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c0deb1ab43142f29a15397a2e23d048', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '105f108590e14c649fff545b5b96f4fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1338.311908] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780872, 'name': CopyVirtualDisk_Task} progress is 25%. 
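The nova.policy entry above is a soft oslo.policy check of network:attach_external_network against the member-role credentials; it fails, which simply means external networks are off-limits for this port request, and the boot continues. A minimal sketch of a soft check like that; the 'role:admin' default is an assumption standing in for whatever this deployment's policy actually defines, while the user and project IDs are the ones logged.

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '105f108590e14c649fff545b5b96f4fd',
             'user_id': '2c0deb1ab43142f29a15397a2e23d048'}
    # do_raise=False makes this a soft check: the caller branches on the
    # boolean instead of aborting the request.
    allowed = enforcer.enforce('network:attach_external_network',
                               target={}, creds=creds, do_raise=False)
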
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.319751] env[62525]: INFO nova.compute.manager [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Rebuilding instance [ 1338.365666] env[62525]: INFO nova.compute.manager [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] instance snapshotting [ 1338.368981] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfd57e1-7ce2-4c3b-8116-96c283e539da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.400258] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f80fd2-164d-499d-8691-21e26b3bd005 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.405155] env[62525]: DEBUG nova.compute.manager [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1338.405155] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e86bd7-5323-4b52-8c5e-d7ba8172b666 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.445022] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780873, 'name': ReconfigVM_Task, 'duration_secs': 0.313206} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.445022] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb/8b41bff7-137f-489c-bb88-7487eb8e97cb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1338.445022] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77ec4997-e659-4eb0-a729-64800a9bdccd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.453793] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1338.453793] env[62525]: value = "task-1780875" [ 1338.453793] env[62525]: _type = "Task" [ 1338.453793] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.466147] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780875, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.518617] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "refresh_cache-e3255df2-2de0-4668-ad7b-a864ea680b44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.518873] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Instance network_info: |[{"id": "a9e5d810-d294-43e0-8f03-aaf8ca59d0dd", "address": "fa:16:3e:0d:52:38", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9e5d810-d2", "ovs_interfaceid": "a9e5d810-d294-43e0-8f03-aaf8ca59d0dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1338.519325] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:52:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9e5d810-d294-43e0-8f03-aaf8ca59d0dd', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1338.527203] env[62525]: DEBUG oslo.service.loopingcall [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1338.527811] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1338.528055] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9891bdad-f231-4ea0-9607-35aef708f7b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.546772] env[62525]: DEBUG nova.compute.manager [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Received event network-changed-cf53c0bd-6b6d-4e88-b23e-60c86646c0be {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1338.546967] env[62525]: DEBUG nova.compute.manager [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Refreshing instance network info cache due to event network-changed-cf53c0bd-6b6d-4e88-b23e-60c86646c0be. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1338.547434] env[62525]: DEBUG oslo_concurrency.lockutils [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] Acquiring lock "refresh_cache-1277dac8-3a23-4de8-93c7-c967b0eaf6ba" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1338.547575] env[62525]: DEBUG oslo_concurrency.lockutils [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] Acquired lock "refresh_cache-1277dac8-3a23-4de8-93c7-c967b0eaf6ba" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.547893] env[62525]: DEBUG nova.network.neutron [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Refreshing network info cache for port cf53c0bd-6b6d-4e88-b23e-60c86646c0be {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1338.559194] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1338.559194] env[62525]: value = "task-1780876" [ 1338.559194] env[62525]: _type = "Task" [ 1338.559194] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.569888] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780876, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.602718] env[62525]: DEBUG oslo_vmware.api [None req-2d19fc9b-a3a2-4b16-8def-0ea8eab0592d tempest-ServersAdminNegativeTestJSON-1382129599 tempest-ServersAdminNegativeTestJSON-1382129599-project-admin] Task: {'id': task-1780874, 'name': SuspendVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.623266] env[62525]: DEBUG nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Start building block device mappings for instance. 
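The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entries above come from a retry wrapper in oslo.service's loopingcall module around the VM-creation call, which then issues Folder.CreateVM_Task as logged. A sketch of that shape; the retry limits are illustrative and the exception class is deliberately broad, where production code is more selective.

    from oslo_service import loopingcall
    from oslo_vmware import exceptions as vexc

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10,
                                exceptions=(vexc.VimException,))
    def create_vm(session, folder_ref, config_spec, res_pool_ref, host_ref=None):
        """Create a VM and wait for the CreateVM_Task to finish."""
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref,
                                  host=host_ref)
        # wait_for_task() produces the "CreateVM_Task ... progress is N%"
        # lines and returns the task result (the new VM's reference).
        return session.wait_for_task(task)
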
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1338.806657] env[62525]: DEBUG nova.network.neutron [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Successfully created port: cf67116f-5315-4381-89ee-63b52941270b {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.819046] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "d2e7c558-02af-477c-b996-239ef14ed75b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.819311] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "d2e7c558-02af-477c-b996-239ef14ed75b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.825413] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.764304} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.825697] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1277dac8-3a23-4de8-93c7-c967b0eaf6ba/1277dac8-3a23-4de8-93c7-c967b0eaf6ba.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1338.825921] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1338.826194] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ecdd306e-8254-44cd-abfb-57561679592d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.838047] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1338.838047] env[62525]: value = "task-1780877" [ 1338.838047] env[62525]: _type = "Task" [ 1338.838047] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.850239] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780877, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.921255] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.922629] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1338.923060] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d30d691c-1db9-4a4e-9d64-c1ff26352ce5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.929073] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f2e8d60b-9a09-463b-a88a-8afe4f806dfa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.938394] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1338.938394] env[62525]: value = "task-1780879" [ 1338.938394] env[62525]: _type = "Task" [ 1338.938394] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.939220] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1338.939220] env[62525]: value = "task-1780878" [ 1338.939220] env[62525]: _type = "Task" [ 1338.939220] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.953058] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780879, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.957784] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780878, 'name': PowerOffVM_Task} progress is 0%. 
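The snapshot and rebuild paths above reduce to two more VM-scoped task calls: CreateSnapshot_Task for the ImagesOneServer snapshot and PowerOffVM_Task ahead of the ServersAdmin275 rebuild. Both are invoked on the VM reference itself; a hedged sketch follows, with the memory/quiesce choices being illustrative rather than read from this run.

    def snapshot_vm(session, vm_ref, name, description=''):
        """Take a VM snapshot (no guest memory, crash-consistent)."""
        task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                                  name=name, description=description,
                                  memory=False, quiesce=False)
        return session.wait_for_task(task)

    def power_off_vm(session, vm_ref):
        """Hard power-off; PowerOffVM_Task takes no extra arguments."""
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)
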
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.973737] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780875, 'name': Rename_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.022121] env[62525]: DEBUG nova.network.neutron [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updated VIF entry in instance network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1339.022459] env[62525]: DEBUG nova.network.neutron [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updating instance_info_cache with network_info: [{"id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "address": "fa:16:3e:d0:8d:de", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1790239c-c6", "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.069826] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780876, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.117416] env[62525]: DEBUG oslo_vmware.api [None req-2d19fc9b-a3a2-4b16-8def-0ea8eab0592d tempest-ServersAdminNegativeTestJSON-1382129599 tempest-ServersAdminNegativeTestJSON-1382129599-project-admin] Task: {'id': task-1780874, 'name': SuspendVM_Task, 'duration_secs': 0.674931} completed successfully. 
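The instance_info_cache update above carries the full Neutron network_info structure for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. A minimal sketch of reading the fixed IPs back out of that structure; the values are copied from the cache entry above and the fixed_ips helper is a hypothetical name, not a Nova API:

```python
# Values copied from the instance_info_cache entry above, trimmed to the
# fields the helper actually touches.
NETWORK_INFO = [{
    "id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d",
    "address": "fa:16:3e:d0:8d:de",
    "network": {
        "id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4},
            "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4,
                     "floating_ips": []}],
        }],
    },
    "type": "ovs",
    "devname": "tap1790239c-c6",
    "active": True,
}]

def fixed_ips(network_info):
    """Collect every fixed IP across all VIFs and their subnets."""
    return [ip["address"]
            for vif in network_info
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"]

print(fixed_ips(NETWORK_INFO))  # ['192.168.128.8']
```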
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.122202] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2d19fc9b-a3a2-4b16-8def-0ea8eab0592d tempest-ServersAdminNegativeTestJSON-1382129599 tempest-ServersAdminNegativeTestJSON-1382129599-project-admin] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Suspended the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1339.122202] env[62525]: DEBUG nova.compute.manager [None req-2d19fc9b-a3a2-4b16-8def-0ea8eab0592d tempest-ServersAdminNegativeTestJSON-1382129599 tempest-ServersAdminNegativeTestJSON-1382129599-project-admin] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1339.123822] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19aca5b-9a7f-4cea-862d-45e6624383d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.323916] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "1f3792c0-9f86-4d76-a1a6-28d492869046" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.324172] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.336026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b01e54e-3dc4-4611-89d1-5501c74c0f96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.350788] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844a9030-b1b5-4747-9b62-3b005324a97e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.354639] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065588} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.357405] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.358703] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fb7481-e51c-4212-a09f-adcc74be3e7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.392302] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3eb404a-f176-4a4a-bd74-8c51fa909b1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.413819] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 1277dac8-3a23-4de8-93c7-c967b0eaf6ba/1277dac8-3a23-4de8-93c7-c967b0eaf6ba.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.417784] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e95dfe8-954c-41eb-a897-77988098bbce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.437107] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097b86eb-1ae2-4e1f-94be-420d4bd1b8cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.442726] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1339.442726] env[62525]: value = "task-1780880" [ 1339.442726] env[62525]: _type = "Task" [ 1339.442726] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.460233] env[62525]: DEBUG nova.compute.provider_tree [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.472246] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780878, 'name': PowerOffVM_Task, 'duration_secs': 0.31566} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.472509] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780879, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.476868] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1339.477259] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1339.477731] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.482162] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bce3e5-762d-4a11-b86d-b29571051566 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.485172] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780875, 'name': Rename_Task, 'duration_secs': 0.889} completed successfully. 
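Most VMware operations in this log follow the same lifecycle: invoke a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOffVM_Task, ...), then poll it until progress reaches 100% and it is reported as completed successfully. A minimal, self-contained sketch of that invoke-then-poll pattern; wait_for_task here, poll_fn, and the state values are illustrative assumptions, not the oslo.vmware API:

```python
import time

class TaskTimeout(Exception):
    """Raised when a task does not finish within the allotted time."""

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Poll a long-running task until it succeeds or fails.

    poll_fn() is assumed to return a dict shaped like the log entries,
    e.g. {'id': 'task-1780877', 'state': 'running', 'progress': 0},
    with 'state' eventually becoming 'success' or 'error'.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info['state'] == 'success':
            return info                      # "completed successfully"
        if info['state'] == 'error':
            raise RuntimeError(f"task {info['id']} failed")
        # Task still running: report progress and back off before polling again.
        print(f"Task {info['id']} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TaskTimeout(f"task did not complete within {timeout}s")
```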
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.485431] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1339.488731] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94473fb7-9d49-4d28-896c-c6c7c3bc68e6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.490256] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1339.490857] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7db4317-de30-4d0f-8b54-b0824db0805a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.496028] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1339.496028] env[62525]: value = "task-1780881" [ 1339.496028] env[62525]: _type = "Task" [ 1339.496028] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.507203] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780881, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.515909] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1339.515909] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1339.515909] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Deleting the datastore file [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1339.516024] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e544e413-9587-478a-a9d1-11bc2a611406 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.525923] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1339.525923] env[62525]: value = "task-1780883" [ 1339.525923] env[62525]: _type = "Task" [ 1339.525923] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.527342] env[62525]: DEBUG oslo_concurrency.lockutils [req-811e6ce0-08c6-4c3a-9045-b95cdbf85fb3 req-a827a063-706c-4549-b786-2a89ef5bfa89 service nova] Releasing lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.532672] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780883, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.568309] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780876, 'name': CreateVM_Task, 'duration_secs': 1.001189} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.568417] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1339.569228] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.570276] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.570276] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1339.570276] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-341c552a-7a06-4b9b-8f2b-bea39f65f81b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.575025] env[62525]: DEBUG nova.network.neutron [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Updated VIF entry in instance network info cache for port cf53c0bd-6b6d-4e88-b23e-60c86646c0be. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1339.575025] env[62525]: DEBUG nova.network.neutron [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Updating instance_info_cache with network_info: [{"id": "cf53c0bd-6b6d-4e88-b23e-60c86646c0be", "address": "fa:16:3e:20:a2:c9", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf53c0bd-6b", "ovs_interfaceid": "cf53c0bd-6b6d-4e88-b23e-60c86646c0be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.576197] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1339.576197] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523f3b12-82e2-cfcd-0d5f-077ff74f61af" [ 1339.576197] env[62525]: _type = "Task" [ 1339.576197] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.585071] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523f3b12-82e2-cfcd-0d5f-077ff74f61af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.637798] env[62525]: DEBUG nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1339.675873] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1339.676517] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1339.676517] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.676517] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1339.676714] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.677305] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1339.677305] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1339.677538] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1339.677714] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 
tempest-ImagesTestJSON-1792439270-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1339.678449] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1339.678449] env[62525]: DEBUG nova.virt.hardware [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1339.679227] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0d2838-16a9-41a4-a74f-bb27d93c4d4e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.687324] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88df7403-8eb4-4367-9f5d-05f32618a805 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.951629] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780879, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.960402] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780880, 'name': ReconfigVM_Task, 'duration_secs': 0.390468} completed successfully. 
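The nova.virt.hardware entries above build CPU topologies for a 1-vCPU m1.nano flavor with limits of sockets=65536, cores=65536, threads=65536 and end up with a single candidate, VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified stand-in for that enumeration step (not the actual _get_possible_cpu_topologies code) that reproduces the same result:

```python
from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is vcpus.

    Simplified illustration of the enumeration the log describes; the
    limits default to the 65536 values logged above.
    """
    found = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if (sockets <= max_sockets and cores <= max_cores
                and threads <= max_threads):
            found.append((sockets, cores, threads))
    return found

print(possible_cpu_topologies(1))  # [(1, 1, 1)] -> "Got 1 possible topologies"
```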
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.960805] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 1277dac8-3a23-4de8-93c7-c967b0eaf6ba/1277dac8-3a23-4de8-93c7-c967b0eaf6ba.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1339.961500] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96bed8a5-2329-4007-bedb-374c8b64a4db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.963718] env[62525]: DEBUG nova.scheduler.client.report [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1339.971701] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1339.971701] env[62525]: value = "task-1780884" [ 1339.971701] env[62525]: _type = "Task" [ 1339.971701] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.983910] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780884, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.004986] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780881, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.032964] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780883, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237221} completed successfully. 
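The scheduler report client above confirms the inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 is unchanged: 48 VCPU at allocation_ratio 4.0, 196590 MB of RAM with 512 MB reserved, and 400 GB of disk. Placement derives schedulable capacity as (total - reserved) * allocation_ratio; a small sketch of that arithmetic using the logged values:

```python
# Inventory as reported above for provider bb89c0ac-8f56-43c6-9f73-fd897be63424.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable_capacity(inventory):
    """Capacity Placement hands out: (total - reserved) * allocation_ratio."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inventory.items()}

print(schedulable_capacity(INVENTORY))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```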
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.033069] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.033295] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1340.033515] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1340.077748] env[62525]: DEBUG oslo_concurrency.lockutils [req-a73d1565-e7a3-45f4-a9d0-ac0386a8a9c8 req-407339a6-7c66-4614-a726-bc9387ec6ea0 service nova] Releasing lock "refresh_cache-1277dac8-3a23-4de8-93c7-c967b0eaf6ba" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.088362] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523f3b12-82e2-cfcd-0d5f-077ff74f61af, 'name': SearchDatastore_Task, 'duration_secs': 0.029709} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.088698] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.088958] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1340.089213] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.089360] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.089535] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1340.089792] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d18a6d7-6673-4cea-84de-021143de7000 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.100088] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1340.100367] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Folder [datastore1] devstack-image-cache_base created. 
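The Acquiring/Acquired/Releasing lock lines come from oslo.concurrency's lockutils, which Nova uses to serialise work on shared resources such as the image-cache VMDK path seen above. A minimal sketch of the two usual forms, assuming oslo.concurrency is installed; the function bodies are placeholders, not the real vmops or resource-tracker callers:

```python
import time

from oslo_concurrency import lockutils

# Lock name copied verbatim from the log above.
IMAGE_CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
                    "a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36")

def copy_cached_image():
    # Context-manager form: the lock is held only while the body runs,
    # which corresponds to the Acquiring/Acquired/Releasing DEBUG lines.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        time.sleep(0.1)  # stand-in for the datastore copy

@lockutils.synchronized("compute_resources")
def instance_claim():
    # Decorator form, matching the "compute_resources" lock held while
    # claiming resources for an instance.
    pass
```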
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1340.101457] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-918aa5fa-8801-4765-87ec-fe422d6d8a04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.108068] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1340.108068] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523e468b-118b-fae7-c971-84db4c90285a" [ 1340.108068] env[62525]: _type = "Task" [ 1340.108068] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.116572] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523e468b-118b-fae7-c971-84db4c90285a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.453248] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780879, 'name': CreateSnapshot_Task, 'duration_secs': 1.296364} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.453517] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1340.454277] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5e2366-1c21-4fff-8347-669839777dbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.468744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.856s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.469231] env[62525]: DEBUG nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1340.475590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.701s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.479295] env[62525]: INFO nova.compute.claims [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1340.492247] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780884, 'name': Rename_Task, 'duration_secs': 0.260337} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.492508] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1340.495225] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc4bf962-4cc1-4ef1-bc78-ab75d76b73e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.501778] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1340.501778] env[62525]: value = "task-1780885" [ 1340.501778] env[62525]: _type = "Task" [ 1340.501778] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.509482] env[62525]: DEBUG oslo_vmware.api [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780881, 'name': PowerOnVM_Task, 'duration_secs': 0.954179} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.510128] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1340.510339] env[62525]: DEBUG nova.compute.manager [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1340.511360] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ee68bb-b224-4d14-970b-2817588f276f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.520553] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.619883] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523e468b-118b-fae7-c971-84db4c90285a, 'name': SearchDatastore_Task, 'duration_secs': 0.015727} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.620639] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a996ddd0-3a06-4bab-887a-a5dc3adc8cdc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.626346] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1340.626346] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526a13ea-3905-1010-1c11-9735bff0822c" [ 1340.626346] env[62525]: _type = "Task" [ 1340.626346] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.634315] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526a13ea-3905-1010-1c11-9735bff0822c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.914719] env[62525]: DEBUG nova.compute.manager [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Received event network-changed-cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1340.914905] env[62525]: DEBUG nova.compute.manager [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Refreshing instance network info cache due to event network-changed-cf41a826-2546-4877-b604-5fd32f6cc102. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1340.915169] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Acquiring lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.915345] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Acquired lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.915469] env[62525]: DEBUG nova.network.neutron [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Refreshing network info cache for port cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1340.981569] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1340.981569] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1c632a41-5f85-4cbd-9360-4daf21794912 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.987646] env[62525]: DEBUG nova.compute.utils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1340.991552] env[62525]: DEBUG nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1340.991958] env[62525]: DEBUG nova.network.neutron [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1341.001020] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1341.001020] env[62525]: value = "task-1780886" [ 1341.001020] env[62525]: _type = "Task" [ 1341.001020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.018020] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780886, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.020136] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780885, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.040206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.081069] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1341.081347] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1341.085276] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 
tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.085276] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1341.085276] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.085276] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1341.085276] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1341.085508] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1341.085508] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1341.085508] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1341.085508] env[62525]: DEBUG nova.virt.hardware [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1341.086236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59149be-fe46-4946-b833-d0c2ea071bd4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.094185] env[62525]: DEBUG nova.policy [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb28cb6694f3476e9684d948fbb4dc9c', 'user_domain_id': 'default', 'system_scope': None, 
'domain_id': None, 'project_id': 'd736ea6d81104e94a75b207da00008d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1341.103076] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4a2d85-0cbe-451b-9561-a1c037c28ffe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.128720] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1341.139344] env[62525]: DEBUG oslo.service.loopingcall [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1341.144473] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1341.144951] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c9799d4-145f-477e-9392-a35bfec94b49 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.173818] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526a13ea-3905-1010-1c11-9735bff0822c, 'name': SearchDatastore_Task, 'duration_secs': 0.021182} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.176221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.176713] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e3255df2-2de0-4668-ad7b-a864ea680b44/e3255df2-2de0-4668-ad7b-a864ea680b44.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1341.177228] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1341.177228] env[62525]: value = "task-1780887" [ 1341.177228] env[62525]: _type = "Task" [ 1341.177228] env[62525]: } to complete. 
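The nova.policy entry above records a failed check of network:attach_external_network for a non-admin request carrying roles ['member', 'reader']. A toy illustration of that kind of role-based decision; the RULES table and check helper are hypothetical and not Nova's real policy engine:

```python
# Credentials as logged above (abbreviated).
CREDS = {
    'is_admin': False,
    'roles': ['member', 'reader'],
    'project_id': 'd736ea6d81104e94a75b207da00008d9',
}

# Hypothetical rule table: attaching an external network requires admin.
RULES = {
    'network:attach_external_network': lambda creds: creds['is_admin'],
}

def check(rule, creds):
    allowed = RULES[rule](creds)
    if not allowed:
        print(f"Policy check for {rule} failed with credentials {creds}")
    return allowed

check('network:attach_external_network', CREDS)  # False, as in the log
```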
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.179190] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4311e4e5-b22c-424e-874c-8a2e32fa7f8e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.198559] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780887, 'name': CreateVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.200807] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1341.200807] env[62525]: value = "task-1780888" [ 1341.200807] env[62525]: _type = "Task" [ 1341.200807] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.213379] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.239935] env[62525]: DEBUG nova.network.neutron [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Successfully updated port: cf67116f-5315-4381-89ee-63b52941270b {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1341.361218] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "e8864d73-35e6-490b-a07c-e8cac8baf880" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.361627] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.492299] env[62525]: DEBUG nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1341.518886] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780886, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.522823] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780885, 'name': PowerOnVM_Task, 'duration_secs': 0.797133} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.523199] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1341.524256] env[62525]: INFO nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Took 9.65 seconds to spawn the instance on the hypervisor. [ 1341.524256] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1341.525309] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c348624-7e74-4d1f-9e2e-d47ef4a56fa2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.701895] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780887, 'name': CreateVM_Task, 'duration_secs': 0.313863} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.708970] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1341.710171] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.710368] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.710723] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1341.712619] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-210bfa8c-96b2-4784-aaee-412f5e3c3809 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.722428] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780888, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.737953] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1341.737953] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d763eb-91e9-48a8-0501-7a82f52fc7e6" [ 1341.737953] env[62525]: _type = "Task" [ 1341.737953] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.743200] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "refresh_cache-84fbb408-7810-4166-a53e-242d51f60322" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.743407] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "refresh_cache-84fbb408-7810-4166-a53e-242d51f60322" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.746461] env[62525]: DEBUG nova.network.neutron [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1341.749154] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d763eb-91e9-48a8-0501-7a82f52fc7e6, 'name': SearchDatastore_Task, 'duration_secs': 0.014081} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.752555] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.752842] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1341.753085] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.753846] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.753846] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1341.754401] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6834bb0f-73a4-4e2e-96a8-03ff2a971f4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.767586] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1341.767830] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1341.768652] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23f83627-1837-4e61-aad9-907b88efeaf7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.780457] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1341.780457] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e3ee86-de85-ac21-a76d-d9439612a4a3" [ 1341.780457] env[62525]: _type = "Task" [ 1341.780457] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.789734] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e3ee86-de85-ac21-a76d-d9439612a4a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.790816] env[62525]: DEBUG nova.network.neutron [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updated VIF entry in instance network info cache for port cf41a826-2546-4877-b604-5fd32f6cc102. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1341.791054] env[62525]: DEBUG nova.network.neutron [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updating instance_info_cache with network_info: [{"id": "cf41a826-2546-4877-b604-5fd32f6cc102", "address": "fa:16:3e:0b:1f:7e", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf41a826-25", "ovs_interfaceid": "cf41a826-2546-4877-b604-5fd32f6cc102", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.947789] env[62525]: DEBUG nova.network.neutron [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Successfully created port: d5ec0210-a571-4097-b7ed-766ddd21f11e {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1342.020250] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780886, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.049121] env[62525]: INFO nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Took 33.12 seconds to build instance. 
[ 1342.166981] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592f3ea3-7b22-430a-aa20-55daf3a3a031 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.173411] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9030ac5-8b86-40e4-8611-93ffa293a875 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.214927] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0ad12d-5976-4ab0-ac44-5e179f65b888 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.227616] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537346} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.229697] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e3255df2-2de0-4668-ad7b-a864ea680b44/e3255df2-2de0-4668-ad7b-a864ea680b44.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1342.229921] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1342.230230] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6ce04de-806d-4549-b240-c1459af0ca0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.232988] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a262bf-7cd2-4f0b-af44-60e7f464c3ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.248711] env[62525]: DEBUG nova.compute.provider_tree [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.252101] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1342.252101] env[62525]: value = "task-1780889" [ 1342.252101] env[62525]: _type = "Task" [ 1342.252101] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.262566] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780889, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.293210] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e3ee86-de85-ac21-a76d-d9439612a4a3, 'name': SearchDatastore_Task, 'duration_secs': 0.014421} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.295847] env[62525]: DEBUG nova.network.neutron [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1342.298875] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Releasing lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.299169] env[62525]: DEBUG nova.compute.manager [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Received event network-vif-plugged-a9e5d810-d294-43e0-8f03-aaf8ca59d0dd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1342.299376] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Acquiring lock "e3255df2-2de0-4668-ad7b-a864ea680b44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.299646] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.299748] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.299936] env[62525]: DEBUG nova.compute.manager [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] No waiting events found dispatching network-vif-plugged-a9e5d810-d294-43e0-8f03-aaf8ca59d0dd {{(pid=62525) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1342.300149] env[62525]: WARNING nova.compute.manager [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Received unexpected event network-vif-plugged-a9e5d810-d294-43e0-8f03-aaf8ca59d0dd for instance with vm_state building and task_state spawning. [ 1342.300330] env[62525]: DEBUG nova.compute.manager [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Received event network-changed-a9e5d810-d294-43e0-8f03-aaf8ca59d0dd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1342.300484] env[62525]: DEBUG nova.compute.manager [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Refreshing instance network info cache due to event network-changed-a9e5d810-d294-43e0-8f03-aaf8ca59d0dd. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1342.300678] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Acquiring lock "refresh_cache-e3255df2-2de0-4668-ad7b-a864ea680b44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.300809] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Acquired lock "refresh_cache-e3255df2-2de0-4668-ad7b-a864ea680b44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.301015] env[62525]: DEBUG nova.network.neutron [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Refreshing network info cache for port a9e5d810-d294-43e0-8f03-aaf8ca59d0dd {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1342.303809] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-604a3732-0c67-4ef7-a7a4-234321020244 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.313044] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1342.313044] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520ccb83-e284-40d1-30ce-00ecbf99421b" [ 1342.313044] env[62525]: _type = "Task" [ 1342.313044] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.322036] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520ccb83-e284-40d1-30ce-00ecbf99421b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.506178] env[62525]: DEBUG nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1342.518274] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780886, 'name': CloneVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.536402] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1342.536671] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1342.536841] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1342.537067] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1342.537170] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1342.537313] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1342.537651] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1342.537651] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1342.537810] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1342.537970] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1342.538162] env[62525]: DEBUG nova.virt.hardware [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1342.539386] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afcb652-5731-4945-af42-7034b89f1147 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.547263] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fd35a1-101f-4529-a1fc-44efcc67b58e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.551591] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.794s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.564928] env[62525]: DEBUG nova.network.neutron [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Updating instance_info_cache with network_info: [{"id": "cf67116f-5315-4381-89ee-63b52941270b", "address": "fa:16:3e:4f:56:5e", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf67116f-53", "ovs_interfaceid": "cf67116f-5315-4381-89ee-63b52941270b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.757027] env[62525]: DEBUG nova.scheduler.client.report [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1342.772442] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780889, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07009} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.772952] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.773306] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.773654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.775028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.775028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.777400] env[62525]: INFO nova.compute.manager [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Terminating instance [ 1342.779889] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1342.783247] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8228febe-be82-4caa-bb03-c0f1cac7f823 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.784313] env[62525]: DEBUG nova.compute.manager [None req-49a4c8af-62e0-472d-b6db-b208a30075ee 
tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1342.784634] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1342.785947] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84737302-0d9e-45f8-a043-b2abddda05c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.810278] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] e3255df2-2de0-4668-ad7b-a864ea680b44/e3255df2-2de0-4668-ad7b-a864ea680b44.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1342.817197] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecb364ae-ab3e-45de-ba38-a1a542b79ba1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.832259] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1342.835878] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b28c1c7a-d27c-4132-8298-d9837fe9409c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.847240] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520ccb83-e284-40d1-30ce-00ecbf99421b, 'name': SearchDatastore_Task, 'duration_secs': 0.017018} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.849608] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.850008] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1342.850455] env[62525]: DEBUG oslo_vmware.api [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1342.850455] env[62525]: value = "task-1780891" [ 1342.850455] env[62525]: _type = "Task" [ 1342.850455] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.850686] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1342.850686] env[62525]: value = "task-1780890" [ 1342.850686] env[62525]: _type = "Task" [ 1342.850686] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.850893] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3476844d-ea06-4b09-8532-51a0006ba1da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.865017] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1342.865017] env[62525]: value = "task-1780892" [ 1342.865017] env[62525]: _type = "Task" [ 1342.865017] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.872024] env[62525]: DEBUG oslo_vmware.api [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.872024] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780890, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.879826] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780892, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.022929] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780886, 'name': CloneVM_Task, 'duration_secs': 1.559967} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.026318] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Created linked-clone VM from snapshot [ 1343.026738] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdc0c76-e5c2-44a1-916b-700833758075 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.036165] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Uploading image a382955a-c365-48c6-8634-872090f3a433 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1343.053864] env[62525]: DEBUG nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1343.069572] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "refresh_cache-84fbb408-7810-4166-a53e-242d51f60322" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.069572] env[62525]: DEBUG nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Instance network_info: |[{"id": "cf67116f-5315-4381-89ee-63b52941270b", "address": "fa:16:3e:4f:56:5e", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf67116f-53", "ovs_interfaceid": "cf67116f-5315-4381-89ee-63b52941270b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1343.069711] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:56:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf67116f-5315-4381-89ee-63b52941270b', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1343.078987] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating folder: Project (105f108590e14c649fff545b5b96f4fd). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1343.082039] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1343.082039] env[62525]: value = "vm-369616" [ 1343.082039] env[62525]: _type = "VirtualMachine" [ 1343.082039] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1343.082039] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f361652-8279-4cf9-908b-9f9c2400b5de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.084163] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9471f3fc-4fa8-468d-b618-3330cca57c8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.093508] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lease: (returnval){ [ 1343.093508] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52911975-fcc5-6173-56e5-68aafb80d861" [ 1343.093508] env[62525]: _type = "HttpNfcLease" [ 1343.093508] env[62525]: } obtained for exporting VM: (result){ [ 1343.093508] env[62525]: value = "vm-369616" [ 1343.093508] env[62525]: _type = "VirtualMachine" [ 1343.093508] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1343.093913] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the lease: (returnval){ [ 1343.093913] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52911975-fcc5-6173-56e5-68aafb80d861" [ 1343.093913] env[62525]: _type = "HttpNfcLease" [ 1343.093913] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1343.095585] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created folder: Project (105f108590e14c649fff545b5b96f4fd) in parent group-v369553. [ 1343.095774] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating folder: Instances. Parent ref: group-v369618. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1343.099619] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65ee5f8d-67e1-4a04-b688-29f6f7ddcc51 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.108124] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1343.108124] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52911975-fcc5-6173-56e5-68aafb80d861" [ 1343.108124] env[62525]: _type = "HttpNfcLease" [ 1343.108124] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1343.110074] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created folder: Instances in parent group-v369618. 
[ 1343.110382] env[62525]: DEBUG oslo.service.loopingcall [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1343.111203] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1343.111203] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90eec30a-0366-4fc0-b04b-b4a98a0d73d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.132471] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1343.132471] env[62525]: value = "task-1780896" [ 1343.132471] env[62525]: _type = "Task" [ 1343.132471] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.141610] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "8b41bff7-137f-489c-bb88-7487eb8e97cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.143340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "8b41bff7-137f-489c-bb88-7487eb8e97cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.143340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "8b41bff7-137f-489c-bb88-7487eb8e97cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.143340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "8b41bff7-137f-489c-bb88-7487eb8e97cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.143340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "8b41bff7-137f-489c-bb88-7487eb8e97cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.144086] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780896, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.144978] env[62525]: INFO nova.compute.manager [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Terminating instance [ 1343.148055] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "refresh_cache-8b41bff7-137f-489c-bb88-7487eb8e97cb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.148055] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "refresh_cache-8b41bff7-137f-489c-bb88-7487eb8e97cb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.148055] env[62525]: DEBUG nova.network.neutron [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1343.266642] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.791s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.267811] env[62525]: DEBUG nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1343.270845] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.609s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.273231] env[62525]: INFO nova.compute.claims [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1343.367901] env[62525]: DEBUG oslo_vmware.api [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780891, 'name': PowerOffVM_Task, 'duration_secs': 0.279638} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.376246] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1343.376246] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1343.376246] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780890, 'name': ReconfigVM_Task, 'duration_secs': 0.368871} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.380375] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-840a62b6-d3a5-4dee-a205-717f5139d5cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.387259] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Reconfigured VM instance instance-00000017 to attach disk [datastore1] e3255df2-2de0-4668-ad7b-a864ea680b44/e3255df2-2de0-4668-ad7b-a864ea680b44.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1343.388930] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2c27c8c-38d9-4960-9f7a-af202fb3e18a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.397404] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780892, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53001} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.399315] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1343.399629] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1343.400022] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1343.400022] env[62525]: value = "task-1780898" [ 1343.400022] env[62525]: _type = "Task" [ 1343.400022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.400309] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18e94a94-cee2-4a32-a180-2ef6ab2ab8bf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.411751] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780898, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.413209] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1343.413209] env[62525]: value = "task-1780899" [ 1343.413209] env[62525]: _type = "Task" [ 1343.413209] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.424406] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780899, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.476631] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1343.476958] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1343.477187] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Deleting the datastore file [datastore1] b6bdc187-a266-4f7d-a9e4-85cb100cf4bf {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1343.477464] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d91c250-4634-4f2e-82eb-f6e5d104221a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.484957] env[62525]: DEBUG oslo_vmware.api [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1343.484957] env[62525]: value = "task-1780900" [ 1343.484957] env[62525]: _type = "Task" [ 1343.484957] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.499197] env[62525]: DEBUG oslo_vmware.api [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780900, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.578883] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.602634] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1343.602634] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52911975-fcc5-6173-56e5-68aafb80d861" [ 1343.602634] env[62525]: _type = "HttpNfcLease" [ 1343.602634] env[62525]: } is ready. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1343.603173] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1343.603173] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52911975-fcc5-6173-56e5-68aafb80d861" [ 1343.603173] env[62525]: _type = "HttpNfcLease" [ 1343.603173] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1343.603990] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35112a86-ab35-4d2b-b6ed-c30a2842e4fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.613429] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c9c7-5902-1208-3fe9-aeab82db08cc/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1343.613668] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c9c7-5902-1208-3fe9-aeab82db08cc/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1343.617672] env[62525]: DEBUG nova.network.neutron [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Updated VIF entry in instance network info cache for port a9e5d810-d294-43e0-8f03-aaf8ca59d0dd. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1343.618056] env[62525]: DEBUG nova.network.neutron [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Updating instance_info_cache with network_info: [{"id": "a9e5d810-d294-43e0-8f03-aaf8ca59d0dd", "address": "fa:16:3e:0d:52:38", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9e5d810-d2", "ovs_interfaceid": "a9e5d810-d294-43e0-8f03-aaf8ca59d0dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.697359] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780896, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.709625] env[62525]: DEBUG nova.network.neutron [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1343.724494] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-126f63c1-4be5-47ba-8737-ce92829b32e3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.750929] env[62525]: DEBUG nova.network.neutron [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Successfully updated port: d5ec0210-a571-4097-b7ed-766ddd21f11e {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.781116] env[62525]: DEBUG nova.compute.utils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1343.784769] env[62525]: DEBUG nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1343.784870] env[62525]: DEBUG nova.network.neutron [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1343.830424] env[62525]: DEBUG nova.policy [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f11db7435d34e32bb68fb1eed112ec1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10ccf8b9c79d4833b636182bd8406921', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1343.835948] env[62525]: DEBUG nova.network.neutron [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.913757] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780898, 'name': Rename_Task, 'duration_secs': 0.159944} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.914765] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1343.917727] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae8fe0a5-a9ad-47c7-9c7f-e46f9fddfbdd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.925054] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080158} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.927072] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1343.927072] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1343.927072] env[62525]: value = "task-1780901" [ 1343.927072] env[62525]: _type = "Task" [ 1343.927072] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.927783] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdce7343-24b4-405b-8a6e-e668a3efe4f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.953274] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1343.957861] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8a9b4cc-d646-4d33-b97f-bb71eeaa42d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.971042] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.977354] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1343.977354] env[62525]: value = "task-1780902" [ 1343.977354] env[62525]: _type = "Task" [ 1343.977354] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.986486] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780902, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.999661] env[62525]: DEBUG oslo_vmware.api [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1780900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.45895} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.000730] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1344.001029] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1344.001655] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1344.001916] env[62525]: INFO nova.compute.manager [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1344.002215] env[62525]: DEBUG oslo.service.loopingcall [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.002521] env[62525]: DEBUG nova.compute.manager [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1344.002633] env[62525]: DEBUG nova.network.neutron [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1344.166124] env[62525]: DEBUG nova.network.neutron [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Successfully created port: 1fb47b20-32dd-44c9-a49e-4917205e9809 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1344.189280] env[62525]: DEBUG oslo_concurrency.lockutils [req-59c7f79f-9409-43b9-8a9e-d39788922056 req-7234a7d3-367c-4c2d-845e-2e3a1a85767a service nova] Releasing lock "refresh_cache-e3255df2-2de0-4668-ad7b-a864ea680b44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.203831] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780896, 'name': CreateVM_Task, 'duration_secs': 0.910966} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.206289] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1344.210021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.210021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.210021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1344.210021] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f86b5a42-60b0-4c92-aff8-1579d6400cdb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.216614] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1344.216614] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f0a3b5-b914-5e0a-c69b-7ef7e571c923" [ 1344.216614] env[62525]: _type = "Task" [ 1344.216614] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.229763] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f0a3b5-b914-5e0a-c69b-7ef7e571c923, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.255329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.255794] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquired lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.257449] env[62525]: DEBUG nova.network.neutron [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1344.292427] env[62525]: DEBUG nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1344.338831] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "refresh_cache-8b41bff7-137f-489c-bb88-7487eb8e97cb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.339296] env[62525]: DEBUG nova.compute.manager [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1344.339471] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1344.342418] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf2aefc-cb93-4fc9-88be-aafd1288e746 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.350359] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1344.350721] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-beb97e8a-a2d1-4700-8379-0234332f9eac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.371020] env[62525]: DEBUG oslo_vmware.api [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1344.371020] env[62525]: value = "task-1780903" [ 1344.371020] env[62525]: _type = "Task" [ 1344.371020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.384944] env[62525]: DEBUG oslo_vmware.api [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780903, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.452070] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780901, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.495345] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780902, 'name': ReconfigVM_Task, 'duration_secs': 0.312504} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.495686] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Reconfigured VM instance instance-00000011 to attach disk [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf/f4cf1732-6b6a-47be-acf4-b127bc4b9baf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1344.496943] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e159eb0-ec7f-4565-9a8d-7924cf64e9c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.507121] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1344.507121] env[62525]: value = "task-1780904" [ 1344.507121] env[62525]: _type = "Task" [ 1344.507121] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.525269] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780904, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.619424] env[62525]: DEBUG nova.compute.manager [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Received event network-vif-plugged-cf67116f-5315-4381-89ee-63b52941270b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1344.619424] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Acquiring lock "84fbb408-7810-4166-a53e-242d51f60322-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.620059] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Lock "84fbb408-7810-4166-a53e-242d51f60322-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.620302] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Lock "84fbb408-7810-4166-a53e-242d51f60322-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.621023] env[62525]: DEBUG nova.compute.manager [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] No waiting events found dispatching 
network-vif-plugged-cf67116f-5315-4381-89ee-63b52941270b {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1344.621023] env[62525]: WARNING nova.compute.manager [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Received unexpected event network-vif-plugged-cf67116f-5315-4381-89ee-63b52941270b for instance with vm_state building and task_state spawning. [ 1344.621023] env[62525]: DEBUG nova.compute.manager [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Received event network-changed-cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1344.621341] env[62525]: DEBUG nova.compute.manager [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Refreshing instance network info cache due to event network-changed-cf41a826-2546-4877-b604-5fd32f6cc102. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1344.621497] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Acquiring lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.621674] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Acquired lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.622279] env[62525]: DEBUG nova.network.neutron [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Refreshing network info cache for port cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1344.739512] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f0a3b5-b914-5e0a-c69b-7ef7e571c923, 'name': SearchDatastore_Task, 'duration_secs': 0.013708} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.740031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.740300] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1344.740713] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.741395] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.742338] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1344.742338] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cc56370-0761-4cab-bac8-5c3401ee9167 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.754169] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1344.754515] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1344.755288] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ef24668-56eb-425a-9b92-6a99024bc71c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.762119] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1344.762119] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c5076a-843b-244a-e954-1e8002799551" [ 1344.762119] env[62525]: _type = "Task" [ 1344.762119] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.774827] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c5076a-843b-244a-e954-1e8002799551, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.819103] env[62525]: DEBUG nova.network.neutron [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1344.886271] env[62525]: DEBUG oslo_vmware.api [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780903, 'name': PowerOffVM_Task, 'duration_secs': 0.159459} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.886666] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1344.886809] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1344.887680] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e548f87-3c79-4eaa-8391-fccb2fcef6a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.911684] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1344.912051] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1344.912554] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleting the datastore file [datastore1] 8b41bff7-137f-489c-bb88-7487eb8e97cb {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1344.912554] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-493143ab-99ab-41f9-b476-3b5bf753e57b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.919516] env[62525]: DEBUG oslo_vmware.api [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1344.919516] env[62525]: value = "task-1780906" [ 1344.919516] env[62525]: _type = "Task" [ 1344.919516] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.932756] env[62525]: DEBUG oslo_vmware.api [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.947776] env[62525]: DEBUG oslo_vmware.api [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780901, 'name': PowerOnVM_Task, 'duration_secs': 0.794864} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.948168] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1344.948315] env[62525]: INFO nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Took 10.30 seconds to spawn the instance on the hypervisor. [ 1344.949595] env[62525]: DEBUG nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1344.950301] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d53d841-53c7-4542-9b65-bcf847f40ad2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.018423] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780904, 'name': Rename_Task, 'duration_secs': 0.154408} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.021868] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1345.022963] env[62525]: DEBUG nova.compute.manager [req-4ec75e38-f296-4ccf-99eb-ab56586403af req-4625e305-60e5-4b46-81b5-1d4c74385b4e service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Received event network-vif-plugged-d5ec0210-a571-4097-b7ed-766ddd21f11e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1345.023178] env[62525]: DEBUG oslo_concurrency.lockutils [req-4ec75e38-f296-4ccf-99eb-ab56586403af req-4625e305-60e5-4b46-81b5-1d4c74385b4e service nova] Acquiring lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.023438] env[62525]: DEBUG oslo_concurrency.lockutils [req-4ec75e38-f296-4ccf-99eb-ab56586403af req-4625e305-60e5-4b46-81b5-1d4c74385b4e service nova] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.023535] env[62525]: DEBUG oslo_concurrency.lockutils [req-4ec75e38-f296-4ccf-99eb-ab56586403af req-4625e305-60e5-4b46-81b5-1d4c74385b4e service nova] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.023697] env[62525]: DEBUG nova.compute.manager [req-4ec75e38-f296-4ccf-99eb-ab56586403af req-4625e305-60e5-4b46-81b5-1d4c74385b4e service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] No waiting events found dispatching network-vif-plugged-d5ec0210-a571-4097-b7ed-766ddd21f11e {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1345.023860] env[62525]: WARNING nova.compute.manager [req-4ec75e38-f296-4ccf-99eb-ab56586403af req-4625e305-60e5-4b46-81b5-1d4c74385b4e service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Received unexpected event network-vif-plugged-d5ec0210-a571-4097-b7ed-766ddd21f11e for instance with vm_state building and task_state spawning. [ 1345.025021] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67bf7f61-20de-4d45-8844-7a16881fc903 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.032887] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Waiting for the task: (returnval){ [ 1345.032887] env[62525]: value = "task-1780907" [ 1345.032887] env[62525]: _type = "Task" [ 1345.032887] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.043176] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780907, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.101585] env[62525]: DEBUG nova.network.neutron [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.116381] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb424e39-09d8-47d7-af90-162f0777001a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.129012] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6c5c6b-2ed4-425b-b13c-fa70e3811f9a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.165158] env[62525]: INFO nova.network.neutron [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Port cf41a826-2546-4877-b604-5fd32f6cc102 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1345.165502] env[62525]: DEBUG nova.network.neutron [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.167413] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddc5737-a91f-4131-8f35-54ed49bf0b78 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.177067] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07310f38-565d-471b-9136-a80e257275c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.193873] env[62525]: DEBUG nova.compute.provider_tree [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1345.211597] env[62525]: DEBUG nova.network.neutron [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Updating instance_info_cache with network_info: [{"id": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "address": "fa:16:3e:f9:6f:ac", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ec0210-a5", "ovs_interfaceid": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.273447] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c5076a-843b-244a-e954-1e8002799551, 'name': SearchDatastore_Task, 'duration_secs': 0.01222} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.274410] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ca84948-e394-46fd-9824-952ac21c34ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.280519] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1345.280519] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e92f71-ac8d-9e11-7838-94f4545e5a49" [ 1345.280519] env[62525]: _type = "Task" [ 1345.280519] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.288842] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e92f71-ac8d-9e11-7838-94f4545e5a49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.304981] env[62525]: DEBUG nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1345.333323] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1345.333547] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1345.333833] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1345.333968] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 
tempest-ImagesNegativeTestJSON-497998007-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1345.334127] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1345.334353] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1345.334629] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1345.334918] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1345.335108] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1345.335302] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1345.335485] env[62525]: DEBUG nova.virt.hardware [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1345.336419] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eec58d2-ef42-4f23-9514-f972e8de8f88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.344571] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52192f48-8f98-45e6-8297-b9ec430d15e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.434315] env[62525]: DEBUG oslo_vmware.api [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1780906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17534} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.434315] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1345.434315] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1345.434315] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1345.434315] env[62525]: INFO nova.compute.manager [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1345.434539] env[62525]: DEBUG oslo.service.loopingcall [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.434539] env[62525]: DEBUG nova.compute.manager [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1345.434539] env[62525]: DEBUG nova.network.neutron [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1345.458825] env[62525]: DEBUG nova.network.neutron [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1345.475604] env[62525]: INFO nova.compute.manager [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Took 33.46 seconds to build instance. [ 1345.543574] env[62525]: DEBUG oslo_vmware.api [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Task: {'id': task-1780907, 'name': PowerOnVM_Task, 'duration_secs': 0.464599} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.544072] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1345.544202] env[62525]: DEBUG nova.compute.manager [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1345.544988] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ea02ea-dc00-4b04-9f61-ca9e056b0c47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.605549] env[62525]: INFO nova.compute.manager [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Took 1.60 seconds to deallocate network for instance. [ 1345.672167] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Releasing lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.672167] env[62525]: DEBUG nova.compute.manager [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Received event network-changed-cf67116f-5315-4381-89ee-63b52941270b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1345.672167] env[62525]: DEBUG nova.compute.manager [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Refreshing instance network info cache due to event network-changed-cf67116f-5315-4381-89ee-63b52941270b. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1345.672378] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Acquiring lock "refresh_cache-84fbb408-7810-4166-a53e-242d51f60322" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.672519] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Acquired lock "refresh_cache-84fbb408-7810-4166-a53e-242d51f60322" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.672675] env[62525]: DEBUG nova.network.neutron [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Refreshing network info cache for port cf67116f-5315-4381-89ee-63b52941270b {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1345.713372] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Releasing lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.713520] env[62525]: DEBUG nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Instance network_info: |[{"id": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "address": "fa:16:3e:f9:6f:ac", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ec0210-a5", "ovs_interfaceid": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1345.713935] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:6f:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5ec0210-a571-4097-b7ed-766ddd21f11e', 
'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.727159] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Creating folder: Project (d736ea6d81104e94a75b207da00008d9). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.729382] env[62525]: ERROR nova.scheduler.client.report [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [req-5d915cfc-bb24-42ad-81d8-6de5bd3beb2a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5d915cfc-bb24-42ad-81d8-6de5bd3beb2a"}]} [ 1345.730040] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-957e29b3-586e-4d3e-b4ae-807d73c49252 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.747721] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Created folder: Project (d736ea6d81104e94a75b207da00008d9) in parent group-v369553. [ 1345.747930] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Creating folder: Instances. Parent ref: group-v369621. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.749430] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4784724e-6df4-42ad-a070-4ce817f19483 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.757748] env[62525]: DEBUG nova.scheduler.client.report [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1345.761152] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Created folder: Instances in parent group-v369621. 
[ 1345.761630] env[62525]: DEBUG oslo.service.loopingcall [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.761630] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1345.761829] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90f51bbe-2b24-4a8e-a575-e42351e35ed5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.781749] env[62525]: DEBUG nova.scheduler.client.report [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1345.782063] env[62525]: DEBUG nova.compute.provider_tree [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1345.790116] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.790116] env[62525]: value = "task-1780910" [ 1345.790116] env[62525]: _type = "Task" [ 1345.790116] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.793847] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e92f71-ac8d-9e11-7838-94f4545e5a49, 'name': SearchDatastore_Task, 'duration_secs': 0.017721} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.797177] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.797439] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 84fbb408-7810-4166-a53e-242d51f60322/84fbb408-7810-4166-a53e-242d51f60322.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1345.798405] env[62525]: DEBUG nova.scheduler.client.report [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1345.800398] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bae0c18-fad4-4990-a672-c327729825ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.809203] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780910, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.811032] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1345.811032] env[62525]: value = "task-1780911" [ 1345.811032] env[62525]: _type = "Task" [ 1345.811032] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.820909] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780911, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.831143] env[62525]: DEBUG nova.scheduler.client.report [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1345.923302] env[62525]: DEBUG nova.network.neutron [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Successfully updated port: 1fb47b20-32dd-44c9-a49e-4917205e9809 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1345.962561] env[62525]: DEBUG nova.network.neutron [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.977935] env[62525]: DEBUG oslo_concurrency.lockutils [None req-774cd18a-89fc-4eff-b72a-f38dc7e63a72 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.163s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.068833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.115208] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.239643] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "e3255df2-2de0-4668-ad7b-a864ea680b44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.240080] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.240080] env[62525]: DEBUG 
oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "e3255df2-2de0-4668-ad7b-a864ea680b44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.240711] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.240711] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.244657] env[62525]: INFO nova.compute.manager [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Terminating instance [ 1346.249047] env[62525]: DEBUG nova.compute.manager [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1346.249304] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.250388] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776d41f6-4c06-4737-b571-7b65d678eb83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.265565] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1346.266097] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e5e9e07-ca4b-4d84-b234-1f7ef35ea9f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.277894] env[62525]: DEBUG oslo_vmware.api [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1346.277894] env[62525]: value = "task-1780912" [ 1346.277894] env[62525]: _type = "Task" [ 1346.277894] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.294719] env[62525]: DEBUG oslo_vmware.api [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780912, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.318057] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780910, 'name': CreateVM_Task, 'duration_secs': 0.437726} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.326947] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1346.331588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.331588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.331940] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1346.332901] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df3c4659-7571-4ef4-80f2-6c7e0813d3d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.346154] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780911, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.355228] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1346.355228] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b85f02-4c3d-309f-aeb1-982f0195c5ca" [ 1346.355228] env[62525]: _type = "Task" [ 1346.355228] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.368356] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b85f02-4c3d-309f-aeb1-982f0195c5ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.426346] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "refresh_cache-c70cf2f1-77a9-4eff-981f-9d72caa82c7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.426518] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquired lock "refresh_cache-c70cf2f1-77a9-4eff-981f-9d72caa82c7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.426724] env[62525]: DEBUG nova.network.neutron [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1346.467290] env[62525]: INFO nova.compute.manager [-] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Took 1.04 seconds to deallocate network for instance. [ 1346.481247] env[62525]: DEBUG nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1346.533208] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdf1eac-a363-4e5a-81e3-a47bb588a94a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.545187] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6393b02e-8fae-4f21-ad64-a6106b2fce14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.582730] env[62525]: DEBUG nova.network.neutron [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Updated VIF entry in instance network info cache for port cf67116f-5315-4381-89ee-63b52941270b. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1346.583404] env[62525]: DEBUG nova.network.neutron [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Updating instance_info_cache with network_info: [{"id": "cf67116f-5315-4381-89ee-63b52941270b", "address": "fa:16:3e:4f:56:5e", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf67116f-53", "ovs_interfaceid": "cf67116f-5315-4381-89ee-63b52941270b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.585012] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff2adfe-858b-402f-8a59-8d03b70dca23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.594302] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03ee2ed-ef0d-4ab4-8fc0-7765701d0311 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.611597] env[62525]: DEBUG nova.compute.provider_tree [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.788746] env[62525]: DEBUG oslo_vmware.api [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780912, 'name': PowerOffVM_Task, 'duration_secs': 0.344633} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.789038] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1346.789202] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1346.789487] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75eca082-bf07-4dd3-837d-6862f4233be7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.814458] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.814737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.832407] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780911, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662858} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.832600] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 84fbb408-7810-4166-a53e-242d51f60322/84fbb408-7810-4166-a53e-242d51f60322.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1346.833017] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1346.833232] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7464e6f9-9ae5-4299-b7d3-d5cf530d762e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.840115] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1346.840115] env[62525]: value = "task-1780914" [ 1346.840115] env[62525]: _type = "Task" [ 1346.840115] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.851429] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780914, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.866489] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b85f02-4c3d-309f-aeb1-982f0195c5ca, 'name': SearchDatastore_Task, 'duration_secs': 0.063662} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.867892] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.868145] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.868415] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.868510] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.868782] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.868984] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1346.869200] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1346.869541] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleting the datastore file [datastore1] e3255df2-2de0-4668-ad7b-a864ea680b44 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.869777] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49f31200-7d2b-4c4a-ba4b-bd24aa44a9cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.871697] 
env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b2fb2a0-ea8f-4733-a8be-1ead3a0a09e6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.881973] env[62525]: DEBUG oslo_vmware.api [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1346.881973] env[62525]: value = "task-1780915" [ 1346.881973] env[62525]: _type = "Task" [ 1346.881973] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.883665] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.883807] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1346.891056] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fce00e6b-3141-49f4-8b2a-696cbc66f97c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.900232] env[62525]: DEBUG oslo_vmware.api [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.901978] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1346.901978] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52151ff3-48bc-3ca4-e1d9-54b619cc40f8" [ 1346.901978] env[62525]: _type = "Task" [ 1346.901978] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.911499] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52151ff3-48bc-3ca4-e1d9-54b619cc40f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.970351] env[62525]: DEBUG nova.network.neutron [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1346.977482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.006063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.092589] env[62525]: DEBUG oslo_concurrency.lockutils [req-6b335303-848f-48b9-bcef-89bf585bb8a4 req-d5e62624-81c3-47a8-8f74-3fc5172636ea service nova] Releasing lock "refresh_cache-84fbb408-7810-4166-a53e-242d51f60322" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.114660] env[62525]: DEBUG nova.scheduler.client.report [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1347.144638] env[62525]: DEBUG nova.network.neutron [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Updating instance_info_cache with network_info: [{"id": "1fb47b20-32dd-44c9-a49e-4917205e9809", "address": "fa:16:3e:69:62:f0", "network": {"id": "5568cbf7-e743-44cd-8685-d105a37a075c", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-45092182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10ccf8b9c79d4833b636182bd8406921", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f40f5c4-c146-449c-884d-6f884dcf2acf", "external-id": "nsx-vlan-transportzone-240", "segmentation_id": 240, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb47b20-32", "ovs_interfaceid": "1fb47b20-32dd-44c9-a49e-4917205e9809", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1347.351475] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780914, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128644} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.351764] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1347.352569] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429a6b62-b556-40bf-8be1-110b9306b5b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.383517] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 84fbb408-7810-4166-a53e-242d51f60322/84fbb408-7810-4166-a53e-242d51f60322.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1347.384135] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.384531] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.384770] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.385041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.385301] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be 
tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.387280] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c090f130-69c8-4290-8bc4-ea7cd3163293 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.404332] env[62525]: INFO nova.compute.manager [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Terminating instance [ 1347.410418] env[62525]: DEBUG nova.compute.manager [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1347.410711] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1347.415060] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc96a544-5ffe-4e88-9649-89937902e1dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.420231] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1347.420231] env[62525]: value = "task-1780916" [ 1347.420231] env[62525]: _type = "Task" [ 1347.420231] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.423904] env[62525]: DEBUG oslo_vmware.api [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.433255] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1347.433585] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52151ff3-48bc-3ca4-e1d9-54b619cc40f8, 'name': SearchDatastore_Task, 'duration_secs': 0.022879} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.433827] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57914d5f-95ed-4ce0-a369-14183ea788d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.438702] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab59754c-57ff-42c4-8fe5-cfbbcff80764 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.445844] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.449524] env[62525]: DEBUG oslo_vmware.api [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1347.449524] env[62525]: value = "task-1780917" [ 1347.449524] env[62525]: _type = "Task" [ 1347.449524] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.450612] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1347.450612] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521efa2a-76ba-0076-5541-aab647f11f04" [ 1347.450612] env[62525]: _type = "Task" [ 1347.450612] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.460037] env[62525]: DEBUG nova.compute.manager [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1347.460243] env[62525]: DEBUG nova.compute.manager [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing instance network info cache due to event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1347.460506] env[62525]: DEBUG oslo_concurrency.lockutils [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] Acquiring lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.460665] env[62525]: DEBUG oslo_concurrency.lockutils [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] Acquired lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.461269] env[62525]: DEBUG nova.network.neutron [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1347.467888] env[62525]: DEBUG oslo_vmware.api [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780917, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.471650] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521efa2a-76ba-0076-5541-aab647f11f04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.604937] env[62525]: DEBUG nova.compute.manager [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Received event network-changed-d5ec0210-a571-4097-b7ed-766ddd21f11e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1347.604937] env[62525]: DEBUG nova.compute.manager [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Refreshing instance network info cache due to event network-changed-d5ec0210-a571-4097-b7ed-766ddd21f11e. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1347.604937] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Acquiring lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.604937] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Acquired lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.604937] env[62525]: DEBUG nova.network.neutron [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Refreshing network info cache for port d5ec0210-a571-4097-b7ed-766ddd21f11e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1347.621468] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.351s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.622047] env[62525]: DEBUG nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1347.625365] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.098s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.627474] env[62525]: INFO nova.compute.claims [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1347.647394] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Releasing lock "refresh_cache-c70cf2f1-77a9-4eff-981f-9d72caa82c7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.647757] env[62525]: DEBUG nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Instance network_info: |[{"id": "1fb47b20-32dd-44c9-a49e-4917205e9809", "address": "fa:16:3e:69:62:f0", "network": {"id": "5568cbf7-e743-44cd-8685-d105a37a075c", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-45092182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10ccf8b9c79d4833b636182bd8406921", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f40f5c4-c146-449c-884d-6f884dcf2acf", "external-id": "nsx-vlan-transportzone-240", "segmentation_id": 240, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb47b20-32", "ovs_interfaceid": "1fb47b20-32dd-44c9-a49e-4917205e9809", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1347.648684] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:62:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f40f5c4-c146-449c-884d-6f884dcf2acf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fb47b20-32dd-44c9-a49e-4917205e9809', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1347.657910] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 
tempest-ImagesNegativeTestJSON-497998007-project-member] Creating folder: Project (10ccf8b9c79d4833b636182bd8406921). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1347.658744] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa11dca9-7465-4d22-b666-b4ee26b10536 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.673675] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Created folder: Project (10ccf8b9c79d4833b636182bd8406921) in parent group-v369553. [ 1347.673992] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Creating folder: Instances. Parent ref: group-v369624. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1347.674529] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1a5b1e0-5b30-4f8c-9731-17851006ec77 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.686228] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Created folder: Instances in parent group-v369624. [ 1347.686228] env[62525]: DEBUG oslo.service.loopingcall [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.687109] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1347.687377] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bee57993-3977-464b-bac1-cb702fa1019b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.714851] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1347.714851] env[62525]: value = "task-1780920" [ 1347.714851] env[62525]: _type = "Task" [ 1347.714851] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.728182] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780920, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.899181] env[62525]: DEBUG oslo_vmware.api [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.623944} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.899459] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.899679] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.899904] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.900121] env[62525]: INFO nova.compute.manager [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1347.900481] env[62525]: DEBUG oslo.service.loopingcall [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.900587] env[62525]: DEBUG nova.compute.manager [-] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1347.900701] env[62525]: DEBUG nova.network.neutron [-] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1347.934586] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780916, 'name': ReconfigVM_Task, 'duration_secs': 0.513743} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.934747] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 84fbb408-7810-4166-a53e-242d51f60322/84fbb408-7810-4166-a53e-242d51f60322.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1347.935434] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30f2a7b8-0e82-4bbc-9a0f-fa33807217aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.942140] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1347.942140] env[62525]: value = "task-1780921" [ 1347.942140] env[62525]: _type = "Task" [ 1347.942140] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.953886] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780921, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.968466] env[62525]: DEBUG oslo_vmware.api [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780917, 'name': PowerOffVM_Task, 'duration_secs': 0.315036} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.972719] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1347.973169] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1347.973217] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521efa2a-76ba-0076-5541-aab647f11f04, 'name': SearchDatastore_Task, 'duration_secs': 0.020986} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.973760] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61bb40fc-f445-4cd8-89a7-12867e1d6b50 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.975462] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.975713] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] a1d1337f-3c41-4c1c-812b-aa10f2a680a8/a1d1337f-3c41-4c1c-812b-aa10f2a680a8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1347.977957] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ee275a1-a584-45e7-9e66-3f9ddd636a0b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.985847] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1347.985847] env[62525]: value = "task-1780923" [ 1347.985847] env[62525]: _type = "Task" [ 1347.985847] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.994032] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780923, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.052600] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1348.053247] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1348.054093] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleting the datastore file [datastore1] 1277dac8-3a23-4de8-93c7-c967b0eaf6ba {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1348.054093] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d6659e6-e17c-470c-9868-17133165ffe8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.061182] env[62525]: DEBUG oslo_vmware.api [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1348.061182] env[62525]: value = "task-1780924" [ 1348.061182] env[62525]: _type = "Task" [ 1348.061182] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.076412] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.077123] env[62525]: DEBUG oslo_vmware.api [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780924, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.135023] env[62525]: DEBUG nova.compute.utils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1348.141025] env[62525]: DEBUG nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1348.141025] env[62525]: DEBUG nova.network.neutron [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1348.172245] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "82ea280a-4e1b-4fac-a634-7f79ce731564" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.172384] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.172654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "82ea280a-4e1b-4fac-a634-7f79ce731564-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.172773] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.172946] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.175771] env[62525]: INFO nova.compute.manager [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Terminating instance [ 1348.177706] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.178014] env[62525]: DEBUG 
oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.179751] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.180056] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.180305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.182742] env[62525]: DEBUG nova.compute.manager [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1348.183015] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1348.184802] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6915ed-0ebc-476b-9073-acc01a8ea6b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.191220] env[62525]: INFO nova.compute.manager [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Terminating instance [ 1348.194917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "refresh_cache-f4cf1732-6b6a-47be-acf4-b127bc4b9baf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.195344] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquired lock "refresh_cache-f4cf1732-6b6a-47be-acf4-b127bc4b9baf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.195344] env[62525]: DEBUG nova.network.neutron [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1348.204451] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1348.205776] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e35e6af-a70b-44c1-b016-c6ee3a57481d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.213445] env[62525]: DEBUG oslo_vmware.api [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1348.213445] env[62525]: value = "task-1780925" [ 1348.213445] env[62525]: _type = "Task" [ 1348.213445] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.229307] env[62525]: DEBUG oslo_vmware.api [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780925, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.232406] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780920, 'name': CreateVM_Task, 'duration_secs': 0.458112} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.232406] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1348.233092] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.233357] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.234497] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1348.234497] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f62f5e1-01b2-4a9c-ba77-bd8232da384d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.240118] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1348.240118] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e19f95-188b-8139-4666-4cb96ca4958a" [ 1348.240118] env[62525]: _type = "Task" [ 1348.240118] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.249975] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e19f95-188b-8139-4666-4cb96ca4958a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.252458] env[62525]: DEBUG nova.policy [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40b60becd85b4459a6f6abc0aa553551', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a033eda5a6b49fe8bd4cd2c076cb4b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1348.454222] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780921, 'name': Rename_Task, 'duration_secs': 0.177788} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.454587] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1348.454828] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4dcc157-2e56-4dbe-ae28-2e85b8524ea3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.462717] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1348.462717] env[62525]: value = "task-1780926" [ 1348.462717] env[62525]: _type = "Task" [ 1348.462717] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.463569] env[62525]: DEBUG nova.network.neutron [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updated VIF entry in instance network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.463898] env[62525]: DEBUG nova.network.neutron [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updating instance_info_cache with network_info: [{"id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "address": "fa:16:3e:d0:8d:de", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1790239c-c6", "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.483018] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780926, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.503108] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780923, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.575961] env[62525]: DEBUG oslo_vmware.api [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780924, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.580298] env[62525]: DEBUG nova.network.neutron [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Updated VIF entry in instance network info cache for port d5ec0210-a571-4097-b7ed-766ddd21f11e. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.580677] env[62525]: DEBUG nova.network.neutron [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Updating instance_info_cache with network_info: [{"id": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "address": "fa:16:3e:f9:6f:ac", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ec0210-a5", "ovs_interfaceid": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.641234] env[62525]: DEBUG nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1348.726069] env[62525]: DEBUG oslo_vmware.api [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780925, 'name': PowerOffVM_Task, 'duration_secs': 0.333848} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.727835] env[62525]: DEBUG nova.network.neutron [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1348.729265] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1348.729472] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1348.730546] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-640071f6-3113-4922-8d83-62ade4aa6321 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.755133] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e19f95-188b-8139-4666-4cb96ca4958a, 'name': SearchDatastore_Task, 'duration_secs': 0.012404} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.755556] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.755856] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1348.756214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.756409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.756688] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1348.758679] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f59ee8b9-d2bf-4329-bb4d-37db75ff12b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.774522] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1348.774842] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1348.775900] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d82dda98-1e33-4c9a-be5a-07f80671b941 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.787828] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1348.787828] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d475d1-b9a4-b326-ec28-03f6fd02ba72" [ 1348.787828] env[62525]: _type = "Task" [ 1348.787828] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.789635] env[62525]: DEBUG nova.network.neutron [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Successfully created port: 945dca79-be4e-4671-b11c-271f1a6db036 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1348.802941] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d475d1-b9a4-b326-ec28-03f6fd02ba72, 'name': SearchDatastore_Task, 'duration_secs': 0.013223} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.804857] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1348.805053] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1348.805227] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleting the datastore file [datastore1] 82ea280a-4e1b-4fac-a634-7f79ce731564 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1348.806314] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4f45606-d008-47ef-96e1-b9bba94c2ab4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.808023] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc4c0ed6-10cc-4330-ab37-acabb9fc5905 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.812899] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1348.812899] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521268f4-916d-0983-ba9f-ea219d721a98" [ 1348.812899] env[62525]: _type = "Task" [ 1348.812899] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.814664] env[62525]: DEBUG oslo_vmware.api [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1348.814664] env[62525]: value = "task-1780928" [ 1348.814664] env[62525]: _type = "Task" [ 1348.814664] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.828723] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521268f4-916d-0983-ba9f-ea219d721a98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.832107] env[62525]: DEBUG oslo_vmware.api [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780928, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.864640] env[62525]: DEBUG nova.network.neutron [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.900856] env[62525]: DEBUG nova.network.neutron [-] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.975109] env[62525]: DEBUG oslo_concurrency.lockutils [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] Releasing lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.975109] env[62525]: DEBUG nova.compute.manager [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Received event network-vif-deleted-cf41a826-2546-4877-b604-5fd32f6cc102 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1348.975267] env[62525]: DEBUG nova.compute.manager [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1348.975553] env[62525]: DEBUG nova.compute.manager [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing instance network info cache due to event network-changed-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1348.975627] env[62525]: DEBUG oslo_concurrency.lockutils [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] Acquiring lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.975744] env[62525]: DEBUG oslo_concurrency.lockutils [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] Acquired lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.975990] env[62525]: DEBUG nova.network.neutron [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Refreshing network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1348.977204] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780926, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.008077] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780923, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618668} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.008077] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] a1d1337f-3c41-4c1c-812b-aa10f2a680a8/a1d1337f-3c41-4c1c-812b-aa10f2a680a8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1349.008077] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1349.008077] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab3c8363-20b5-478d-ad63-47ef591bdfbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.015298] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1349.015298] env[62525]: value = "task-1780929" [ 1349.015298] env[62525]: _type = "Task" [ 1349.015298] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.031431] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780929, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.080396] env[62525]: DEBUG oslo_vmware.api [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1780924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.52615} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.080396] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.080396] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1349.080396] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1349.080396] env[62525]: INFO nova.compute.manager [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1349.080627] env[62525]: DEBUG oslo.service.loopingcall [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1349.080627] env[62525]: DEBUG nova.compute.manager [-] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1349.080687] env[62525]: DEBUG nova.network.neutron [-] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1349.083416] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Releasing lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.083746] env[62525]: DEBUG nova.compute.manager [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Received event network-vif-plugged-1fb47b20-32dd-44c9-a49e-4917205e9809 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.083978] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Acquiring lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.084215] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.084396] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.084559] env[62525]: DEBUG nova.compute.manager [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] No waiting events found dispatching network-vif-plugged-1fb47b20-32dd-44c9-a49e-4917205e9809 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1349.084724] env[62525]: WARNING nova.compute.manager [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Received unexpected event network-vif-plugged-1fb47b20-32dd-44c9-a49e-4917205e9809 for instance with vm_state building and task_state spawning. 
[ 1349.084878] env[62525]: DEBUG nova.compute.manager [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Received event network-changed-1fb47b20-32dd-44c9-a49e-4917205e9809 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.085048] env[62525]: DEBUG nova.compute.manager [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Refreshing instance network info cache due to event network-changed-1fb47b20-32dd-44c9-a49e-4917205e9809. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1349.085239] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Acquiring lock "refresh_cache-c70cf2f1-77a9-4eff-981f-9d72caa82c7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.085374] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Acquired lock "refresh_cache-c70cf2f1-77a9-4eff-981f-9d72caa82c7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.085562] env[62525]: DEBUG nova.network.neutron [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Refreshing network info cache for port 1fb47b20-32dd-44c9-a49e-4917205e9809 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1349.325242] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521268f4-916d-0983-ba9f-ea219d721a98, 'name': SearchDatastore_Task, 'duration_secs': 0.014888} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.328920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.329353] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c70cf2f1-77a9-4eff-981f-9d72caa82c7b/c70cf2f1-77a9-4eff-981f-9d72caa82c7b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1349.330449] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f79b7887-e775-404f-a831-67e65d54f955 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.344094] env[62525]: DEBUG oslo_vmware.api [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1780928, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247258} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.344094] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.344094] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1349.344094] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1349.344094] env[62525]: INFO nova.compute.manager [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1349.345381] env[62525]: DEBUG oslo.service.loopingcall [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1349.345381] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1349.345381] env[62525]: value = "task-1780930" [ 1349.345381] env[62525]: _type = "Task" [ 1349.345381] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.345381] env[62525]: DEBUG nova.compute.manager [-] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1349.345381] env[62525]: DEBUG nova.network.neutron [-] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1349.361052] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.370398] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Releasing lock "refresh_cache-f4cf1732-6b6a-47be-acf4-b127bc4b9baf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.370398] env[62525]: DEBUG nova.compute.manager [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1349.370398] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1349.370398] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f3cbb2-cd3e-4fb7-a1c4-57117859e1e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.373640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee07d27-9fdc-49a6-bfa7-af2bf151168e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.381675] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1349.383901] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e3ef51c-5f92-419a-802e-9e91e37e3988 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.386784] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d563899-bfae-47ae-b3a9-45bc23db5f92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.397020] env[62525]: DEBUG oslo_vmware.api [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1349.397020] env[62525]: value = "task-1780931" [ 1349.397020] env[62525]: _type = "Task" [ 1349.397020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.423552] env[62525]: INFO nova.compute.manager [-] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Took 1.52 seconds to deallocate network for instance. [ 1349.431246] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6a3f72-95c3-4a39-ad9c-9f8f5e964e9a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.444272] env[62525]: DEBUG oslo_vmware.api [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780931, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.448264] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11138af3-c2fe-43c3-aaa7-5255e4b6ed88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.462512] env[62525]: DEBUG nova.compute.provider_tree [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.474442] env[62525]: DEBUG oslo_vmware.api [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780926, 'name': PowerOnVM_Task, 'duration_secs': 0.682923} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.475463] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1349.475673] env[62525]: INFO nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Took 9.84 seconds to spawn the instance on the hypervisor. [ 1349.475848] env[62525]: DEBUG nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1349.476671] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98cc5eeb-32a8-4672-a8a4-a95e9ea9134f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.527897] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780929, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074317} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.530363] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1349.532069] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a830327-730e-4b7e-a358-4efbec002164 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.555350] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] a1d1337f-3c41-4c1c-812b-aa10f2a680a8/a1d1337f-3c41-4c1c-812b-aa10f2a680a8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.556402] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab5cdd52-e8db-454c-9f0b-e6ce12195d3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.581797] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1349.581797] env[62525]: value = "task-1780932" [ 1349.581797] env[62525]: _type = "Task" [ 1349.581797] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.595902] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780932, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.656124] env[62525]: DEBUG nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1349.689945] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1349.690505] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1349.690505] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1349.690677] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1349.690729] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1349.690919] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1349.691531] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1349.691739] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1349.691965] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1349.692222] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1349.692436] env[62525]: DEBUG nova.virt.hardware [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1349.693433] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80126cf1-c34e-4c6e-9563-5b8477fad0b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.704147] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d8bdb4-3915-4352-8814-1b04d3d39831 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.854838] env[62525]: DEBUG nova.network.neutron [-] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.859792] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780930, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.882636] env[62525]: DEBUG nova.network.neutron [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updated VIF entry in instance network info cache for port 1790239c-c6c1-47bb-ac87-c96e5a2f2e8d. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1349.882994] env[62525]: DEBUG nova.network.neutron [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updating instance_info_cache with network_info: [{"id": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "address": "fa:16:3e:d0:8d:de", "network": {"id": "d1ee3096-25f7-46c0-bd21-c55342ebaad9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-808870213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6486285375a44318c14aee23e914dcf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1790239c-c6", "ovs_interfaceid": "1790239c-c6c1-47bb-ac87-c96e5a2f2e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.933208] env[62525]: DEBUG oslo_vmware.api [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780931, 'name': PowerOffVM_Task, 'duration_secs': 0.308105} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.933478] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1349.933655] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1349.933898] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b255ac2f-8b04-49f1-8e26-cb40368ac22b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.943022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.960826] env[62525]: DEBUG nova.network.neutron [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Updated VIF entry in instance network info cache for port 1fb47b20-32dd-44c9-a49e-4917205e9809. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1349.961295] env[62525]: DEBUG nova.network.neutron [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Updating instance_info_cache with network_info: [{"id": "1fb47b20-32dd-44c9-a49e-4917205e9809", "address": "fa:16:3e:69:62:f0", "network": {"id": "5568cbf7-e743-44cd-8685-d105a37a075c", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-45092182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10ccf8b9c79d4833b636182bd8406921", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f40f5c4-c146-449c-884d-6f884dcf2acf", "external-id": "nsx-vlan-transportzone-240", "segmentation_id": 240, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb47b20-32", "ovs_interfaceid": "1fb47b20-32dd-44c9-a49e-4917205e9809", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.966242] env[62525]: DEBUG nova.scheduler.client.report [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1349.969224] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1349.969372] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1349.969551] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Deleting the datastore file [datastore1] f4cf1732-6b6a-47be-acf4-b127bc4b9baf {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.970964] env[62525]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f706b5ec-a589-4727-8ccc-cc8fa3e9e158 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.974429] env[62525]: DEBUG nova.compute.manager [req-3f603007-3496-4fb3-9d09-1e3be545fd18 req-8ca45421-a917-458f-bd77-4a25cd7e679e service nova] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Received event network-vif-deleted-a9e5d810-d294-43e0-8f03-aaf8ca59d0dd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.974429] env[62525]: DEBUG nova.compute.manager [req-3f603007-3496-4fb3-9d09-1e3be545fd18 req-8ca45421-a917-458f-bd77-4a25cd7e679e service nova] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Received event network-vif-deleted-cf53c0bd-6b6d-4e88-b23e-60c86646c0be {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.980022] env[62525]: DEBUG oslo_vmware.api [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for the task: (returnval){ [ 1349.980022] env[62525]: value = "task-1780934" [ 1349.980022] env[62525]: _type = "Task" [ 1349.980022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.993525] env[62525]: DEBUG oslo_vmware.api [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780934, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.998273] env[62525]: INFO nova.compute.manager [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Took 35.92 seconds to build instance. 
[ 1350.082973] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "d38bbd59-b40c-4965-b823-caefc93e2568" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.083287] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "d38bbd59-b40c-4965-b823-caefc93e2568" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.083499] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "d38bbd59-b40c-4965-b823-caefc93e2568-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.084161] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "d38bbd59-b40c-4965-b823-caefc93e2568-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.084396] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "d38bbd59-b40c-4965-b823-caefc93e2568-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.090329] env[62525]: INFO nova.compute.manager [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Terminating instance [ 1350.093051] env[62525]: DEBUG nova.compute.manager [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1350.093319] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1350.094322] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21163c7-d3e6-4cc3-ba8b-e0764e026f80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.101193] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780932, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.105538] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1350.106789] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16ea1d40-c70e-4723-a982-3f4931e40323 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.146210] env[62525]: DEBUG nova.compute.manager [req-ab00df03-9e69-4e2e-b5fd-af730e5d0cc4 req-ee6e78cc-8cbc-4038-9e31-d7a6d6f7b625 service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Received event network-vif-deleted-4a5c70c1-3224-4684-a897-549395a4ae84 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.146557] env[62525]: INFO nova.compute.manager [req-ab00df03-9e69-4e2e-b5fd-af730e5d0cc4 req-ee6e78cc-8cbc-4038-9e31-d7a6d6f7b625 service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Neutron deleted interface 4a5c70c1-3224-4684-a897-549395a4ae84; detaching it from the instance and deleting it from the info cache [ 1350.146880] env[62525]: DEBUG nova.network.neutron [req-ab00df03-9e69-4e2e-b5fd-af730e5d0cc4 req-ee6e78cc-8cbc-4038-9e31-d7a6d6f7b625 service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.191362] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1350.191590] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1350.191766] env[62525]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Deleting the datastore file [datastore1] d38bbd59-b40c-4965-b823-caefc93e2568 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1350.192416] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5584eb01-71c0-473a-83e1-2e0f022855eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.201026] env[62525]: DEBUG oslo_vmware.api [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1350.201026] env[62525]: value = "task-1780936" [ 1350.201026] env[62525]: _type = "Task" [ 1350.201026] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.207113] env[62525]: DEBUG oslo_vmware.api [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780936, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.268406] env[62525]: DEBUG nova.network.neutron [-] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.360889] env[62525]: INFO nova.compute.manager [-] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Took 1.28 seconds to deallocate network for instance. [ 1350.361578] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62228} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.363472] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c70cf2f1-77a9-4eff-981f-9d72caa82c7b/c70cf2f1-77a9-4eff-981f-9d72caa82c7b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1350.363695] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1350.366505] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43d5425d-7a24-41f3-93ac-5f59d14e45c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.373627] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1350.373627] env[62525]: value = "task-1780937" [ 1350.373627] env[62525]: _type = "Task" [ 1350.373627] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.382071] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780937, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.385701] env[62525]: DEBUG oslo_concurrency.lockutils [req-c72eee36-a16c-4095-91f7-eeac46b2c7e8 req-e52f9374-2396-4551-9792-d401cf26c08a service nova] Releasing lock "refresh_cache-5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.464388] env[62525]: DEBUG oslo_concurrency.lockutils [req-800bc4fe-f8d3-407e-914e-0d352303f7d1 req-f5a5f304-d89a-419c-9e49-1f66b8e9672b service nova] Releasing lock "refresh_cache-c70cf2f1-77a9-4eff-981f-9d72caa82c7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.472595] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.847s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.473245] env[62525]: DEBUG nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1350.479405] env[62525]: DEBUG nova.network.neutron [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Successfully updated port: 945dca79-be4e-4671-b11c-271f1a6db036 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1350.480476] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.318s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.482694] env[62525]: INFO nova.compute.claims [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1350.499361] env[62525]: DEBUG oslo_vmware.api [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Task: {'id': task-1780934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33323} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.499877] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1350.500109] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1350.500373] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1350.500570] env[62525]: INFO nova.compute.manager [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1350.501112] env[62525]: DEBUG oslo.service.loopingcall [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1350.501201] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e328532f-4ede-4826-9ddc-289624516bea tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.171s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.501344] env[62525]: DEBUG nova.compute.manager [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1350.501438] env[62525]: DEBUG nova.network.neutron [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1350.521122] env[62525]: DEBUG nova.network.neutron [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1350.597863] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780932, 'name': ReconfigVM_Task, 'duration_secs': 0.595518} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.598051] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Reconfigured VM instance instance-00000019 to attach disk [datastore1] a1d1337f-3c41-4c1c-812b-aa10f2a680a8/a1d1337f-3c41-4c1c-812b-aa10f2a680a8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.598688] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9684c2c9-ec0d-4b41-ac10-66269da38672 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.607511] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1350.607511] env[62525]: value = "task-1780938" [ 1350.607511] env[62525]: _type = "Task" [ 1350.607511] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.618564] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780938, 'name': Rename_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.651044] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5cef2cb-6e96-4b5e-9e45-655832326007 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.660634] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91df1c2d-ad8c-4ee2-94a8-e58992bc4c86 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.695116] env[62525]: DEBUG nova.compute.manager [req-ab00df03-9e69-4e2e-b5fd-af730e5d0cc4 req-ee6e78cc-8cbc-4038-9e31-d7a6d6f7b625 service nova] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Detach interface failed, port_id=4a5c70c1-3224-4684-a897-549395a4ae84, reason: Instance 82ea280a-4e1b-4fac-a634-7f79ce731564 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1350.708023] env[62525]: DEBUG oslo_vmware.api [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1780936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311425} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.708838] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1350.709187] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1350.709393] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1350.709819] env[62525]: INFO nova.compute.manager [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1350.710528] env[62525]: DEBUG oslo.service.loopingcall [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1350.710528] env[62525]: DEBUG nova.compute.manager [-] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1350.710528] env[62525]: DEBUG nova.network.neutron [-] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1350.771076] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "84fbb408-7810-4166-a53e-242d51f60322" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.771436] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.771645] env[62525]: DEBUG nova.compute.manager [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1350.772094] env[62525]: INFO nova.compute.manager [-] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Took 1.43 seconds to deallocate network for instance. 
[ 1350.772977] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b4b413-5ed6-4136-8520-2cbbb4a0d0d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.787757] env[62525]: DEBUG nova.compute.manager [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1350.788316] env[62525]: DEBUG nova.objects.instance [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lazy-loading 'flavor' on Instance uuid 84fbb408-7810-4166-a53e-242d51f60322 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1350.873018] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.884927] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780937, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074721} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.885308] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1350.886218] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9f4880-34d4-4647-8c05-5e8560c65bbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.914188] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] c70cf2f1-77a9-4eff-981f-9d72caa82c7b/c70cf2f1-77a9-4eff-981f-9d72caa82c7b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1350.914438] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-077db75a-d716-4d19-8869-229cf61f9b14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.936825] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1350.936825] env[62525]: 
value = "task-1780939" [ 1350.936825] env[62525]: _type = "Task" [ 1350.936825] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.947245] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780939, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.987835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "refresh_cache-e34ebddc-2192-4975-81d7-0f5c200f114e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.987994] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquired lock "refresh_cache-e34ebddc-2192-4975-81d7-0f5c200f114e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.988190] env[62525]: DEBUG nova.network.neutron [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1350.991582] env[62525]: DEBUG nova.compute.utils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1350.993024] env[62525]: DEBUG nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1350.993024] env[62525]: DEBUG nova.network.neutron [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1351.010109] env[62525]: DEBUG nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1351.023750] env[62525]: DEBUG nova.network.neutron [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.042690] env[62525]: DEBUG nova.policy [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a25fb102ce7045dd9766a80cac4f30b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5151f26885ad4289a37a8d1ee35d5b7c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1351.120220] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780938, 'name': Rename_Task, 'duration_secs': 0.209753} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.120526] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1351.120779] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-570f1713-8c75-462a-a876-08d2dee6e0e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.129062] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1351.129062] env[62525]: value = "task-1780940" [ 1351.129062] env[62525]: _type = "Task" [ 1351.129062] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.139585] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780940, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.288376] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.292773] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1351.293082] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b54511b-2336-469f-8423-ff1206df3576 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.300920] env[62525]: DEBUG oslo_vmware.api [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1351.300920] env[62525]: value = "task-1780941" [ 1351.300920] env[62525]: _type = "Task" [ 1351.300920] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.310335] env[62525]: DEBUG oslo_vmware.api [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780941, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.317595] env[62525]: DEBUG nova.network.neutron [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Successfully created port: c02917b8-9b2b-4fc8-a054-7011866a8326 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1351.449962] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780939, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.497623] env[62525]: DEBUG nova.network.neutron [-] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.500038] env[62525]: DEBUG nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1351.530353] env[62525]: INFO nova.compute.manager [-] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Took 1.03 seconds to deallocate network for instance. 
[ 1351.547479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.645832] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780940, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.720447] env[62525]: DEBUG nova.network.neutron [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1351.811966] env[62525]: DEBUG oslo_vmware.api [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780941, 'name': PowerOffVM_Task, 'duration_secs': 0.188852} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.812955] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1351.813285] env[62525]: DEBUG nova.compute.manager [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1351.814130] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3122d328-8ba3-4605-8272-58a63e3a76d5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.933714] env[62525]: DEBUG nova.network.neutron [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Updating instance_info_cache with network_info: [{"id": "945dca79-be4e-4671-b11c-271f1a6db036", "address": "fa:16:3e:42:44:c6", "network": {"id": "a26f0bb8-dbfd-472f-a0a1-e1c1011c81fd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1938907248-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a033eda5a6b49fe8bd4cd2c076cb4b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap945dca79-be", "ovs_interfaceid": "945dca79-be4e-4671-b11c-271f1a6db036", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.954803] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780939, 'name': ReconfigVM_Task, 'duration_secs': 0.611863} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.958998] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Reconfigured VM instance instance-0000001a to attach disk [datastore1] c70cf2f1-77a9-4eff-981f-9d72caa82c7b/c70cf2f1-77a9-4eff-981f-9d72caa82c7b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1351.960456] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4a7b1c6-3aed-4d9d-afd0-fdcd8d7db3e9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.968089] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1351.968089] env[62525]: value = "task-1780942" [ 1351.968089] env[62525]: _type = "Task" [ 1351.968089] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.981135] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780942, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.004540] env[62525]: INFO nova.compute.manager [-] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Took 1.29 seconds to deallocate network for instance. 
[ 1352.037964] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.132388] env[62525]: DEBUG nova.compute.manager [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Received event network-vif-plugged-945dca79-be4e-4671-b11c-271f1a6db036 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1352.133580] env[62525]: DEBUG oslo_concurrency.lockutils [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] Acquiring lock "e34ebddc-2192-4975-81d7-0f5c200f114e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.133580] env[62525]: DEBUG oslo_concurrency.lockutils [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.133825] env[62525]: DEBUG oslo_concurrency.lockutils [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.133825] env[62525]: DEBUG nova.compute.manager [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] No waiting events found dispatching network-vif-plugged-945dca79-be4e-4671-b11c-271f1a6db036 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1352.133955] env[62525]: WARNING nova.compute.manager [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Received unexpected event network-vif-plugged-945dca79-be4e-4671-b11c-271f1a6db036 for instance with vm_state building and task_state spawning. [ 1352.134146] env[62525]: DEBUG nova.compute.manager [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Received event network-changed-945dca79-be4e-4671-b11c-271f1a6db036 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1352.134302] env[62525]: DEBUG nova.compute.manager [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Refreshing instance network info cache due to event network-changed-945dca79-be4e-4671-b11c-271f1a6db036. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1352.134437] env[62525]: DEBUG oslo_concurrency.lockutils [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] Acquiring lock "refresh_cache-e34ebddc-2192-4975-81d7-0f5c200f114e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.147053] env[62525]: DEBUG oslo_vmware.api [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780940, 'name': PowerOnVM_Task, 'duration_secs': 0.61107} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.147146] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1352.147293] env[62525]: INFO nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Took 9.64 seconds to spawn the instance on the hypervisor. [ 1352.147468] env[62525]: DEBUG nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1352.148302] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551935b4-9ba5-4a2b-ac21-12538e65d336 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.204013] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a229678-c228-448b-9775-e3304ea41830 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.213430] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e1bf16-d81f-4faa-9bff-e9a94d56589c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.252329] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bbea7a-e067-4e84-b7e7-315e5e6e9664 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.260392] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe1b8e1-072e-4d0f-ac28-bc7fa887b153 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.277244] env[62525]: DEBUG nova.compute.provider_tree [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.331901] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88ad209c-12e6-4e7a-9a33-b3d05a6ea3d9 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.560s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.443684] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Releasing lock "refresh_cache-e34ebddc-2192-4975-81d7-0f5c200f114e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.443975] env[62525]: DEBUG nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Instance network_info: |[{"id": "945dca79-be4e-4671-b11c-271f1a6db036", "address": "fa:16:3e:42:44:c6", "network": {"id": "a26f0bb8-dbfd-472f-a0a1-e1c1011c81fd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1938907248-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a033eda5a6b49fe8bd4cd2c076cb4b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap945dca79-be", "ovs_interfaceid": "945dca79-be4e-4671-b11c-271f1a6db036", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1352.444370] env[62525]: DEBUG oslo_concurrency.lockutils [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] Acquired lock "refresh_cache-e34ebddc-2192-4975-81d7-0f5c200f114e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.444943] env[62525]: DEBUG nova.network.neutron [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Refreshing network info cache for port 945dca79-be4e-4671-b11c-271f1a6db036 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1352.446154] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:44:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'13e83154-c0d2-4d3d-b95e-3cd5ba336257', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '945dca79-be4e-4671-b11c-271f1a6db036', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1352.454982] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Creating folder: Project (4a033eda5a6b49fe8bd4cd2c076cb4b9). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1352.456187] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd84daf0-b4e1-462b-96fe-60e3328f5901 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.470156] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Created folder: Project (4a033eda5a6b49fe8bd4cd2c076cb4b9) in parent group-v369553. [ 1352.470156] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Creating folder: Instances. Parent ref: group-v369627. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1352.473426] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04a7030b-3ceb-4407-8472-88204cee585b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.481137] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780942, 'name': Rename_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.483462] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Created folder: Instances in parent group-v369627. [ 1352.483610] env[62525]: DEBUG oslo.service.loopingcall [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1352.483815] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1352.484027] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fb608b6-3a1c-473f-a264-f6e793635afd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.501158] env[62525]: DEBUG nova.compute.manager [None req-9b4c8088-69b5-423a-af07-5e86839d5e5a tempest-ServerExternalEventsTest-88994852 tempest-ServerExternalEventsTest-88994852-project] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Received event network-changed {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1352.501490] env[62525]: DEBUG nova.compute.manager [None req-9b4c8088-69b5-423a-af07-5e86839d5e5a tempest-ServerExternalEventsTest-88994852 tempest-ServerExternalEventsTest-88994852-project] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Refreshing instance network info cache due to event network-changed. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1352.503272] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9b4c8088-69b5-423a-af07-5e86839d5e5a tempest-ServerExternalEventsTest-88994852 tempest-ServerExternalEventsTest-88994852-project] Acquiring lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.503272] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9b4c8088-69b5-423a-af07-5e86839d5e5a tempest-ServerExternalEventsTest-88994852 tempest-ServerExternalEventsTest-88994852-project] Acquired lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.503272] env[62525]: DEBUG nova.network.neutron [None req-9b4c8088-69b5-423a-af07-5e86839d5e5a tempest-ServerExternalEventsTest-88994852 tempest-ServerExternalEventsTest-88994852-project] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1352.509640] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1352.509640] env[62525]: value = "task-1780945" [ 1352.509640] env[62525]: _type = "Task" [ 1352.509640] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.514080] env[62525]: DEBUG nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1352.517121] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.523012] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780945, 'name': CreateVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.546536] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1352.546886] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1352.547093] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1352.547350] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1352.547554] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1352.548719] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1352.548719] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1352.548719] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1352.548719] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1352.548719] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1352.548951] env[62525]: DEBUG nova.virt.hardware [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1352.549835] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f3d233-fc76-4373-a445-51d23d98d3ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.558679] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f699ff64-cd8c-4b37-86ae-1acaa7aa4af8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.667797] env[62525]: INFO nova.compute.manager [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Took 37.45 seconds to build instance. [ 1352.783027] env[62525]: DEBUG nova.scheduler.client.report [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1352.980773] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780942, 'name': Rename_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.020133] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780945, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.080264] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.170732] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e7610fb-0f18-48a2-bdce-2ffa8915732b tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.680s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.172457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.092s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.172683] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.172964] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.173483] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.175332] env[62525]: INFO nova.compute.manager [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Terminating instance [ 1353.179206] env[62525]: DEBUG nova.compute.manager [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c 
tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1353.179487] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1353.180355] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6570c18a-0ab5-439d-82a0-94f4fc13893e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.188396] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1353.188656] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc08c562-aa42-46a3-9c4f-46c714d8ac2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.200248] env[62525]: DEBUG oslo_vmware.api [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1353.200248] env[62525]: value = "task-1780946" [ 1353.200248] env[62525]: _type = "Task" [ 1353.200248] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.210027] env[62525]: DEBUG oslo_vmware.api [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780946, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.220849] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c9c7-5902-1208-3fe9-aeab82db08cc/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1353.222670] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dbbccc-79ec-4cda-99e2-70b3e2005ad1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.229682] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c9c7-5902-1208-3fe9-aeab82db08cc/disk-0.vmdk is in state: ready. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1353.229883] env[62525]: ERROR oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c9c7-5902-1208-3fe9-aeab82db08cc/disk-0.vmdk due to incomplete transfer. [ 1353.230141] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cfba4dbe-7f5d-4453-bce5-692d1326ecf0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.237297] env[62525]: DEBUG oslo_vmware.rw_handles [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c9c7-5902-1208-3fe9-aeab82db08cc/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1353.238921] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Uploaded image a382955a-c365-48c6-8634-872090f3a433 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1353.239737] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1353.239996] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e6025def-6856-43b3-82ff-ccd027c51c49 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.249713] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1353.249713] env[62525]: value = "task-1780947" [ 1353.249713] env[62525]: _type = "Task" [ 1353.249713] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.258945] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780947, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.289330] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.809s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.289856] env[62525]: DEBUG nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1353.292948] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.588s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.293160] env[62525]: DEBUG nova.objects.instance [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lazy-loading 'resources' on Instance uuid 30fbab3d-8141-4d7e-987f-e4f4fc4a1808 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1353.479790] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780942, 'name': Rename_Task, 'duration_secs': 1.207731} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.480104] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1353.480359] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0057b286-2acb-4b16-b341-d63ae188ef6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.488164] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1353.488164] env[62525]: value = "task-1780948" [ 1353.488164] env[62525]: _type = "Task" [ 1353.488164] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.499233] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780948, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.508124] env[62525]: DEBUG nova.network.neutron [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Updated VIF entry in instance network info cache for port 945dca79-be4e-4671-b11c-271f1a6db036. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1353.508527] env[62525]: DEBUG nova.network.neutron [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Updating instance_info_cache with network_info: [{"id": "945dca79-be4e-4671-b11c-271f1a6db036", "address": "fa:16:3e:42:44:c6", "network": {"id": "a26f0bb8-dbfd-472f-a0a1-e1c1011c81fd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1938907248-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a033eda5a6b49fe8bd4cd2c076cb4b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap945dca79-be", "ovs_interfaceid": "945dca79-be4e-4671-b11c-271f1a6db036", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.514235] env[62525]: DEBUG nova.network.neutron [None req-9b4c8088-69b5-423a-af07-5e86839d5e5a tempest-ServerExternalEventsTest-88994852 tempest-ServerExternalEventsTest-88994852-project] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Updating instance_info_cache with network_info: [{"id": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "address": "fa:16:3e:f9:6f:ac", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ec0210-a5", "ovs_interfaceid": "d5ec0210-a571-4097-b7ed-766ddd21f11e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1353.520203] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780945, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.543014] env[62525]: DEBUG nova.network.neutron [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Successfully updated port: c02917b8-9b2b-4fc8-a054-7011866a8326 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1353.675966] env[62525]: DEBUG nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1353.711677] env[62525]: DEBUG oslo_vmware.api [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780946, 'name': PowerOffVM_Task, 'duration_secs': 0.188277} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.711985] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1353.712187] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1353.712453] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2e5ffba-2ec8-46ac-b233-c139fae6b415 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.759623] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780947, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.800630] env[62525]: DEBUG nova.compute.utils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1353.802110] env[62525]: DEBUG nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1353.802300] env[62525]: DEBUG nova.network.neutron [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1353.866245] env[62525]: DEBUG nova.policy [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eee5a510a514320b8f5eb0a6bf66121', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba1be24793ee4d83babc07ff8ad5abad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1353.881635] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1353.881871] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1353.882148] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Deleting the datastore file [datastore1] a1d1337f-3c41-4c1c-812b-aa10f2a680a8 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.882363] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cb6156c-2bb1-4eb6-bd22-6783a34f602d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.890644] env[62525]: DEBUG oslo_vmware.api [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for the task: (returnval){ [ 1353.890644] env[62525]: value = "task-1780950" [ 1353.890644] env[62525]: _type = "Task" [ 1353.890644] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.902371] env[62525]: DEBUG oslo_vmware.api [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780950, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.999012] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780948, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.011315] env[62525]: DEBUG oslo_concurrency.lockutils [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] Releasing lock "refresh_cache-e34ebddc-2192-4975-81d7-0f5c200f114e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.011568] env[62525]: DEBUG nova.compute.manager [req-b8a96fca-1630-4fac-895e-1c7a635df288 req-14088fb1-b442-42b7-a094-677feeb8daa9 service nova] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Received event network-vif-deleted-1a9d7127-13cf-4940-83b3-8cecf0ea3a03 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1354.017538] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9b4c8088-69b5-423a-af07-5e86839d5e5a tempest-ServerExternalEventsTest-88994852 tempest-ServerExternalEventsTest-88994852-project] Releasing lock "refresh_cache-a1d1337f-3c41-4c1c-812b-aa10f2a680a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.024429] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780945, 'name': CreateVM_Task, 'duration_secs': 1.134235} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.024652] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1354.025424] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.026035] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.026035] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1354.026372] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62fad257-28ad-4b8f-bfb9-7d128d206491 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1354.035643] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1354.035643] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5263eeb8-e2cd-fbd8-98d3-3c106b6e406a" [ 1354.035643] env[62525]: _type = "Task" [ 1354.035643] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.046798] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "refresh_cache-2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.047060] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquired lock "refresh_cache-2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.047259] env[62525]: DEBUG nova.network.neutron [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1354.049022] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5263eeb8-e2cd-fbd8-98d3-3c106b6e406a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.201932] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.245219] env[62525]: DEBUG nova.network.neutron [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Successfully created port: ebd6c0fe-5181-41be-a80c-55b3b3d0841d {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1354.260765] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780947, 'name': Destroy_Task, 'duration_secs': 0.627812} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.264227] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Destroyed the VM [ 1354.264583] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1354.265893] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7c3fa577-65f8-400b-9150-069071247fb3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.273156] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1354.273156] env[62525]: value = "task-1780951" [ 1354.273156] env[62525]: _type = "Task" [ 1354.273156] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.282964] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780951, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.308194] env[62525]: DEBUG nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1354.400291] env[62525]: DEBUG oslo_vmware.api [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Task: {'id': task-1780950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.400544] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1354.400729] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1354.400906] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1354.401158] env[62525]: INFO nova.compute.manager [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1354.401320] env[62525]: DEBUG oslo.service.loopingcall [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.403725] env[62525]: DEBUG nova.compute.manager [-] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1354.403839] env[62525]: DEBUG nova.network.neutron [-] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1354.421119] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abde026-9db9-4aa4-aeca-d9a4fde55737 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.428531] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f982765-d4a0-4935-8343-8019055e1817 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.435780] env[62525]: DEBUG nova.compute.manager [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Received event network-vif-plugged-c02917b8-9b2b-4fc8-a054-7011866a8326 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1354.435991] env[62525]: DEBUG oslo_concurrency.lockutils [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] Acquiring lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.436396] env[62525]: DEBUG oslo_concurrency.lockutils [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.436581] env[62525]: DEBUG oslo_concurrency.lockutils [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.436892] env[62525]: DEBUG nova.compute.manager [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] No waiting events found dispatching network-vif-plugged-c02917b8-9b2b-4fc8-a054-7011866a8326 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1354.437179] env[62525]: WARNING nova.compute.manager [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Received unexpected event network-vif-plugged-c02917b8-9b2b-4fc8-a054-7011866a8326 for instance with vm_state building and task_state spawning. 
[ 1354.437432] env[62525]: DEBUG nova.compute.manager [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Received event network-changed-c02917b8-9b2b-4fc8-a054-7011866a8326 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1354.437610] env[62525]: DEBUG nova.compute.manager [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Refreshing instance network info cache due to event network-changed-c02917b8-9b2b-4fc8-a054-7011866a8326. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1354.437811] env[62525]: DEBUG oslo_concurrency.lockutils [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] Acquiring lock "refresh_cache-2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.466771] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c7e636-221e-4e2d-84fe-0f1a66c1616f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.476859] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabdcd65-c7b3-495c-bf1e-bb90eb429483 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.490725] env[62525]: DEBUG nova.compute.provider_tree [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.500656] env[62525]: DEBUG oslo_vmware.api [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780948, 'name': PowerOnVM_Task, 'duration_secs': 0.60498} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.501607] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1354.501821] env[62525]: INFO nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Took 9.20 seconds to spawn the instance on the hypervisor. 
[ 1354.502009] env[62525]: DEBUG nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1354.503028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cb1f67-4a9d-45cc-80eb-a2f6f862ffb5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.547115] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5263eeb8-e2cd-fbd8-98d3-3c106b6e406a, 'name': SearchDatastore_Task, 'duration_secs': 0.026709} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.547521] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.547764] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1354.548198] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.549895] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.549895] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1354.549895] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f361670d-48ce-4513-a729-1d78fac3c79a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.561757] env[62525]: DEBUG nova.compute.manager [None 
req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1354.561757] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1354.561757] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1354.563240] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a15ae9-50b0-4bcb-80d0-ab75441a7cf7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.566305] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-325d114b-302f-4e4e-af30-01ca1c3cffb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.576299] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1354.576299] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527fc69e-05c8-9172-cd49-206af9e8651d" [ 1354.576299] env[62525]: _type = "Task" [ 1354.576299] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.584638] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527fc69e-05c8-9172-cd49-206af9e8651d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.604663] env[62525]: DEBUG nova.network.neutron [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1354.784945] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780951, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.811754] env[62525]: DEBUG nova.network.neutron [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Updating instance_info_cache with network_info: [{"id": "c02917b8-9b2b-4fc8-a054-7011866a8326", "address": "fa:16:3e:8e:90:e5", "network": {"id": "da6e6404-3c14-42e8-bcdc-6d1c020ba944", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-496629259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5151f26885ad4289a37a8d1ee35d5b7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc02917b8-9b", "ovs_interfaceid": "c02917b8-9b2b-4fc8-a054-7011866a8326", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.994382] env[62525]: DEBUG nova.scheduler.client.report [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1355.027132] env[62525]: INFO nova.compute.manager [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Took 37.29 seconds to build instance. 
[ 1355.076961] env[62525]: INFO nova.compute.manager [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] instance snapshotting [ 1355.077378] env[62525]: WARNING nova.compute.manager [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1355.082835] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1d232a-808f-4f9c-9e23-05d60a07e7ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.092573] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527fc69e-05c8-9172-cd49-206af9e8651d, 'name': SearchDatastore_Task, 'duration_secs': 0.024392} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.107888] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-128b17ab-809a-403a-aef5-212c79428ee3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.111213] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d753f2-cb24-4284-89d8-04e8a231e370 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.116095] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1355.116095] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fc29a3-f1c0-8292-3c62-ae80e3c98b43" [ 1355.116095] env[62525]: _type = "Task" [ 1355.116095] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.128441] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fc29a3-f1c0-8292-3c62-ae80e3c98b43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.284781] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780951, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.317531] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Releasing lock "refresh_cache-2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.317531] env[62525]: DEBUG nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Instance network_info: |[{"id": "c02917b8-9b2b-4fc8-a054-7011866a8326", "address": "fa:16:3e:8e:90:e5", "network": {"id": "da6e6404-3c14-42e8-bcdc-6d1c020ba944", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-496629259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5151f26885ad4289a37a8d1ee35d5b7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc02917b8-9b", "ovs_interfaceid": "c02917b8-9b2b-4fc8-a054-7011866a8326", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1355.317719] env[62525]: DEBUG oslo_concurrency.lockutils [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] Acquired lock "refresh_cache-2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.317719] env[62525]: DEBUG nova.network.neutron [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Refreshing network info cache for port c02917b8-9b2b-4fc8-a054-7011866a8326 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1355.317719] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:90:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c02917b8-9b2b-4fc8-a054-7011866a8326', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1355.325635] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Creating 
folder: Project (5151f26885ad4289a37a8d1ee35d5b7c). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1355.330029] env[62525]: DEBUG nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1355.336710] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14fce3d3-8dab-4281-8f82-77314861780b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.340587] env[62525]: DEBUG nova.network.neutron [-] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.349514] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Created folder: Project (5151f26885ad4289a37a8d1ee35d5b7c) in parent group-v369553. [ 1355.349514] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Creating folder: Instances. Parent ref: group-v369630. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1355.349514] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2dae1f4-580f-4e29-a205-56c22073ff65 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.359597] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Created folder: Instances in parent group-v369630. [ 1355.359597] env[62525]: DEBUG oslo.service.loopingcall [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1355.359597] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1355.359767] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f23b4ec-d5f0-4df9-a3c9-382eb99e53ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.385140] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1355.385140] env[62525]: value = "task-1780954" [ 1355.385140] env[62525]: _type = "Task" [ 1355.385140] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.387387] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1355.388154] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1355.388330] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1355.388517] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1355.388659] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1355.388799] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1355.389009] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1355.389198] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1355.389370] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1355.389525] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1355.389707] env[62525]: DEBUG nova.virt.hardware [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1355.390565] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805aa81d-2d4d-427b-995c-041baa9f1e65 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.406468] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc54e652-093e-4e78-97ed-31ebfc4bb1fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.410405] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780954, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.499450] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.501665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.685s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.501856] env[62525]: DEBUG nova.objects.instance [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1355.531076] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ed4c2fb-e093-4493-a95e-164389905973 tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.603s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.559248] env[62525]: INFO nova.scheduler.client.report [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Deleted allocations for instance 30fbab3d-8141-4d7e-987f-e4f4fc4a1808 [ 1355.586630] env[62525]: DEBUG nova.network.neutron [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Updated VIF entry in instance network info cache for port c02917b8-9b2b-4fc8-a054-7011866a8326. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.587105] env[62525]: DEBUG nova.network.neutron [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Updating instance_info_cache with network_info: [{"id": "c02917b8-9b2b-4fc8-a054-7011866a8326", "address": "fa:16:3e:8e:90:e5", "network": {"id": "da6e6404-3c14-42e8-bcdc-6d1c020ba944", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-496629259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5151f26885ad4289a37a8d1ee35d5b7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc02917b8-9b", "ovs_interfaceid": "c02917b8-9b2b-4fc8-a054-7011866a8326", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.625026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1355.629023] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f29d50f2-5c15-4ecd-90f0-1595139db56c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.633040] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fc29a3-f1c0-8292-3c62-ae80e3c98b43, 'name': SearchDatastore_Task, 'duration_secs': 0.01969} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.633040] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.633040] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e34ebddc-2192-4975-81d7-0f5c200f114e/e34ebddc-2192-4975-81d7-0f5c200f114e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1355.633040] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfb5edd8-9a99-4e17-9030-0742b1b9d916 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.637332] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1355.637332] env[62525]: value = "task-1780955" [ 1355.637332] env[62525]: _type = "Task" [ 1355.637332] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.641791] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1355.641791] env[62525]: value = "task-1780956" [ 1355.641791] env[62525]: _type = "Task" [ 1355.641791] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.648140] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780955, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.653217] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780956, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.784053] env[62525]: DEBUG oslo_vmware.api [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780951, 'name': RemoveSnapshot_Task, 'duration_secs': 1.421407} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.785084] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1355.785084] env[62525]: INFO nova.compute.manager [None req-6c3e0e20-cd1a-4a29-970c-7b4732da9f15 tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Took 17.42 seconds to snapshot the instance on the hypervisor. [ 1355.845291] env[62525]: INFO nova.compute.manager [-] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Took 1.44 seconds to deallocate network for instance. [ 1355.903538] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780954, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.958686] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.959081] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.959337] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.959562] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.959764] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.963638] env[62525]: INFO nova.compute.manager [None 
req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Terminating instance [ 1355.965746] env[62525]: DEBUG nova.compute.manager [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1355.965746] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1355.966818] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c623ab-2c76-4e1b-83df-c5e59fb5e64a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.975080] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1355.975320] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cd17a68-f4fc-4e77-83ab-e9d95b9f8d03 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.983652] env[62525]: DEBUG oslo_vmware.api [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1355.983652] env[62525]: value = "task-1780957" [ 1355.983652] env[62525]: _type = "Task" [ 1355.983652] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.992391] env[62525]: DEBUG oslo_vmware.api [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.033674] env[62525]: DEBUG nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1356.052213] env[62525]: DEBUG nova.network.neutron [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Successfully updated port: ebd6c0fe-5181-41be-a80c-55b3b3d0841d {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1356.072520] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d950d19-5c44-411b-8415-9d7f07fe9cf7 tempest-ServerDiagnosticsV248Test-521129931 tempest-ServerDiagnosticsV248Test-521129931-project-member] Lock "30fbab3d-8141-4d7e-987f-e4f4fc4a1808" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.419s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.090274] env[62525]: DEBUG oslo_concurrency.lockutils [req-910940f6-f224-4aff-b627-e24ea707820b req-26242bc1-9ce1-42f6-bbc2-09650831435a service nova] Releasing lock "refresh_cache-2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.155154] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780956, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.158918] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780955, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.352707] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.403197] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780954, 'name': CreateVM_Task, 'duration_secs': 1.008316} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.403410] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1356.404167] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.404386] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.404758] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1356.405098] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a8e45f3-b8ee-4458-b177-ca60d3b018ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.410742] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1356.410742] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527735a0-17fe-4d27-854b-58358df412e8" [ 1356.410742] env[62525]: _type = "Task" [ 1356.410742] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.418217] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527735a0-17fe-4d27-854b-58358df412e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.493900] env[62525]: DEBUG oslo_vmware.api [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780957, 'name': PowerOffVM_Task, 'duration_secs': 0.407488} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.494197] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1356.494994] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1356.494994] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ca58802-ffb2-4ca2-8180-68c5a0a81ce9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.510567] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f13bdc24-4899-45e0-89df-b3eebbce9126 tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.511747] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 21.591s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.557694] env[62525]: DEBUG nova.compute.manager [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Received event network-vif-deleted-d5ec0210-a571-4097-b7ed-766ddd21f11e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1356.558072] env[62525]: DEBUG nova.compute.manager [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Received event network-vif-plugged-ebd6c0fe-5181-41be-a80c-55b3b3d0841d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1356.558072] env[62525]: DEBUG oslo_concurrency.lockutils [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] Acquiring lock "9dfb7d7f-6656-46fd-969e-c692db1ce507-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.558723] env[62525]: DEBUG oslo_concurrency.lockutils [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] Lock "9dfb7d7f-6656-46fd-969e-c692db1ce507-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.559041] env[62525]: DEBUG oslo_concurrency.lockutils [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] Lock 
"9dfb7d7f-6656-46fd-969e-c692db1ce507-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.559149] env[62525]: DEBUG nova.compute.manager [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] No waiting events found dispatching network-vif-plugged-ebd6c0fe-5181-41be-a80c-55b3b3d0841d {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1356.559642] env[62525]: WARNING nova.compute.manager [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Received unexpected event network-vif-plugged-ebd6c0fe-5181-41be-a80c-55b3b3d0841d for instance with vm_state building and task_state spawning. [ 1356.559642] env[62525]: DEBUG nova.compute.manager [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Received event network-changed-ebd6c0fe-5181-41be-a80c-55b3b3d0841d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1356.561923] env[62525]: DEBUG nova.compute.manager [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Refreshing instance network info cache due to event network-changed-ebd6c0fe-5181-41be-a80c-55b3b3d0841d. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1356.561923] env[62525]: DEBUG oslo_concurrency.lockutils [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] Acquiring lock "refresh_cache-9dfb7d7f-6656-46fd-969e-c692db1ce507" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.561923] env[62525]: DEBUG oslo_concurrency.lockutils [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] Acquired lock "refresh_cache-9dfb7d7f-6656-46fd-969e-c692db1ce507" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.561923] env[62525]: DEBUG nova.network.neutron [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Refreshing network info cache for port ebd6c0fe-5181-41be-a80c-55b3b3d0841d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1356.561923] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "refresh_cache-9dfb7d7f-6656-46fd-969e-c692db1ce507" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.563258] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.574768] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1356.574768] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1356.575070] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Deleting the datastore file [datastore1] c70cf2f1-77a9-4eff-981f-9d72caa82c7b {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1356.575122] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12a24cee-c458-4635-a970-d069088a8a0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.582297] env[62525]: DEBUG oslo_vmware.api [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for the task: (returnval){ [ 1356.582297] env[62525]: value = "task-1780959" [ 1356.582297] env[62525]: _type = "Task" [ 1356.582297] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.590832] env[62525]: DEBUG oslo_vmware.api [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780959, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.650648] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780955, 'name': CreateSnapshot_Task, 'duration_secs': 0.781747} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.650753] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1356.652167] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5278651c-b79c-4628-9063-9ec3bf02a5db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.657188] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780956, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722538} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.657833] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e34ebddc-2192-4975-81d7-0f5c200f114e/e34ebddc-2192-4975-81d7-0f5c200f114e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1356.658061] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1356.658315] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d9da936-f244-4683-ab9a-8da62f962d41 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.671610] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1356.671610] env[62525]: value = "task-1780960" [ 1356.671610] env[62525]: _type = "Task" [ 1356.671610] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.679494] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780960, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.923136] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527735a0-17fe-4d27-854b-58358df412e8, 'name': SearchDatastore_Task, 'duration_secs': 0.01104} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.923569] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.923836] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1356.924142] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.924330] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.924540] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1356.924826] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86725951-996a-46c8-9bf3-589bb271193f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.933336] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1356.933571] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1356.934393] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c87f505-318e-4bed-bf01-37cf3dfb87c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.940368] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1356.940368] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f0105b-2f89-1d42-9150-44dbe1d67281" [ 1356.940368] env[62525]: _type = "Task" [ 1356.940368] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.947352] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f0105b-2f89-1d42-9150-44dbe1d67281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.095417] env[62525]: DEBUG oslo_vmware.api [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Task: {'id': task-1780959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150608} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.095679] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1357.096327] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1357.096817] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1357.096817] env[62525]: INFO nova.compute.manager [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1357.096965] env[62525]: DEBUG oslo.service.loopingcall [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1357.097522] env[62525]: DEBUG nova.compute.manager [-] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1357.097522] env[62525]: DEBUG nova.network.neutron [-] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1357.099551] env[62525]: DEBUG nova.network.neutron [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1357.179883] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1357.179883] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4382a211-3486-4684-819d-68419e5e6d95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.193073] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071189} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.194480] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1357.194884] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1357.194884] env[62525]: value = "task-1780961" [ 1357.194884] env[62525]: _type = "Task" [ 1357.194884] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.195904] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd47273-b393-4ebe-8562-26230812568b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.211087] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780961, 'name': CloneVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.232483] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] e34ebddc-2192-4975-81d7-0f5c200f114e/e34ebddc-2192-4975-81d7-0f5c200f114e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1357.233445] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c64c362e-0085-418f-b04c-1f58d2243761 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.252830] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1357.252830] env[62525]: value = "task-1780962" [ 1357.252830] env[62525]: _type = "Task" [ 1357.252830] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.261560] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780962, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.279949] env[62525]: DEBUG nova.network.neutron [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.450952] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f0105b-2f89-1d42-9150-44dbe1d67281, 'name': SearchDatastore_Task, 'duration_secs': 0.014456} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.451964] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97e1bda7-51c9-424d-a25d-12e74bf7e810 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.459724] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1357.459724] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521bb811-e925-d810-5088-2a45bc3e6c11" [ 1357.459724] env[62525]: _type = "Task" [ 1357.459724] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.468364] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521bb811-e925-d810-5088-2a45bc3e6c11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.564393] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.564658] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c7603ce8-8471-4813-9faf-3667a205893c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.564658] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.564845] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.564884] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance deef59c8-f710-434d-bddc-f63bb3d518b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.565027] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 98334a1b-1a73-408f-93a4-6dc72764ebfc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.565200] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.565323] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 82ea280a-4e1b-4fac-a634-7f79ce731564 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1357.565401] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 56cb0d0c-a7dd-4158-8bed-ddff050e0226 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.565504] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance aa639aa3-d21c-4923-bc39-56e648c566fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.565732] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f93669f2-c59d-4f3f-85a2-a60d714326ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.565732] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 7f8392fa-1c11-4180-bda9-057b5cfa058c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.565836] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f4cf1732-6b6a-47be-acf4-b127bc4b9baf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1357.565955] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance with task_state "deleting" is not being actively managed by this compute host but has allocations referencing this compute node (bb89c0ac-8f56-43c6-9f73-fd897be63424): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1357.566243] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 35a2e221-e1c5-49d9-af93-5e5f28c62b8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.566243] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8b41bff7-137f-489c-bb88-7487eb8e97cb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1357.566371] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance d38bbd59-b40c-4965-b823-caefc93e2568 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1357.566420] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 1277dac8-3a23-4de8-93c7-c967b0eaf6ba is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1357.566538] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e3255df2-2de0-4668-ad7b-a864ea680b44 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1357.566661] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 84fbb408-7810-4166-a53e-242d51f60322 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.566762] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance a1d1337f-3c41-4c1c-812b-aa10f2a680a8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1357.566899] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c70cf2f1-77a9-4eff-981f-9d72caa82c7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.567026] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e34ebddc-2192-4975-81d7-0f5c200f114e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.567257] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.567376] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 9dfb7d7f-6656-46fd-969e-c692db1ce507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1357.709297] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780961, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.763221] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780962, 'name': ReconfigVM_Task, 'duration_secs': 0.307569} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.763527] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Reconfigured VM instance instance-0000001b to attach disk [datastore1] e34ebddc-2192-4975-81d7-0f5c200f114e/e34ebddc-2192-4975-81d7-0f5c200f114e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1357.764167] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7f066b0-cc5d-4ee9-b312-7d813dde5057 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.770594] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1357.770594] env[62525]: value = "task-1780963" [ 1357.770594] env[62525]: _type = "Task" [ 1357.770594] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.779895] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780963, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.784353] env[62525]: DEBUG oslo_concurrency.lockutils [req-99f67823-2f76-4181-8cb6-e2340a079c57 req-2edf452c-f695-4063-8323-9e7c8c7c8650 service nova] Releasing lock "refresh_cache-9dfb7d7f-6656-46fd-969e-c692db1ce507" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.784353] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "refresh_cache-9dfb7d7f-6656-46fd-969e-c692db1ce507" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.784353] env[62525]: DEBUG nova.network.neutron [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1357.961439] env[62525]: DEBUG nova.network.neutron [-] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.973171] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521bb811-e925-d810-5088-2a45bc3e6c11, 'name': SearchDatastore_Task, 'duration_secs': 0.012198} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.973445] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.973703] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b/2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1357.973960] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e310a53-b043-4923-883c-97192f7b3686 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.980481] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1357.980481] env[62525]: value = "task-1780964" [ 1357.980481] env[62525]: _type = "Task" [ 1357.980481] env[62525]: } to complete. 
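Note: each "Task: {'id': ..., 'name': ...} progress is N%" line above is one iteration of oslo.vmware's task poller, and "Waiting for the task ... to complete" is `wait_for_task` blocking until the vCenter task reaches a terminal state. A rough sketch of that loop follows; `get_task_info()` is a stand-in for the PropertyCollector round trip the real code performs in oslo_vmware/api.py.

```python
import time

# Hypothetical sketch of a wait_for_task-style poller. get_task_info() stands
# in for the PropertyCollector call oslo.vmware makes each poll interval.

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or errors."""
    while True:
        info = get_task_info()          # dict with 'state', 'progress', ...
        state = info["state"]
        if state in ("queued", "running"):
            # Matches the "progress is N%" debug lines in the log.
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        elif state == "success":
            return info.get("result")
        else:                            # 'error'
            raise TaskFailed(info.get("error", "task failed"))

# Usage with a canned sequence of states, just to show the flow:
states = iter([
    {"state": "running", "progress": 0},
    {"state": "running", "progress": 77},
    {"state": "success", "result": "done"},
])
print(wait_for_task(lambda: next(states), poll_interval=0))
```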
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.988589] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.072613] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cfae9bf8-012a-4286-b978-bba8a913bba2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1358.096817] env[62525]: DEBUG nova.compute.manager [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1358.098535] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2f0a36-2c41-4d08-b0be-1d345dc78288 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.215853] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780961, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.280667] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780963, 'name': Rename_Task, 'duration_secs': 0.144475} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.280952] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1358.281219] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41fa1f74-197b-46e8-b6a9-26c142926865 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.289333] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1358.289333] env[62525]: value = "task-1780965" [ 1358.289333] env[62525]: _type = "Task" [ 1358.289333] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.299491] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.377738] env[62525]: DEBUG nova.network.neutron [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1358.433680] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.434038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.467548] env[62525]: INFO nova.compute.manager [-] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Took 1.37 seconds to deallocate network for instance. [ 1358.494397] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780964, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.575266] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e3f3fc2c-0060-4521-8aa3-da37209aee81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1358.585930] env[62525]: DEBUG nova.compute.manager [req-529c992b-c12d-412e-a928-aa020c268eff req-8df4d11f-f03d-46d2-8d76-e2097b94fbe9 service nova] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Received event network-vif-deleted-1fb47b20-32dd-44c9-a49e-4917205e9809 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1358.592599] env[62525]: DEBUG nova.network.neutron [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Updating instance_info_cache with network_info: [{"id": "ebd6c0fe-5181-41be-a80c-55b3b3d0841d", "address": "fa:16:3e:3f:d5:58", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebd6c0fe-51", "ovs_interfaceid": "ebd6c0fe-5181-41be-a80c-55b3b3d0841d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.610660] env[62525]: INFO nova.compute.manager [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] instance snapshotting [ 1358.613589] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9af7c1a-65ee-44f7-bbd4-ee519ba92e15 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.634635] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21c932d-6907-44cf-8626-a23f70852b57 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.710437] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780961, 'name': CloneVM_Task, 'duration_secs': 1.43874} completed successfully. 
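Note: the instance_info_cache update above carries the full network_info structure for the VIF. The fields worth reading at a glance (port id, MAC, bridge, fixed IPs, OVS interface id) can be pulled out with plain dict walking; the snippet below mirrors the structure shown in the log but is illustrative parsing, not Nova's NetworkInfo model.

```python
# Minimal sketch: extracting the useful fields from a Nova network_info entry
# like the one cached above. The 'vif' dict mirrors the log structure.

def summarize_vif(vif):
    net = vif["network"]
    fixed_ips = [
        ip["address"]
        for subnet in net["subnets"]
        for ip in subnet["ips"]
    ]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "bridge": net["bridge"],
        "fixed_ips": fixed_ips,
        "ovs_interface": vif.get("ovs_interfaceid"),
        "devname": vif.get("devname"),
    }

vif = {
    "id": "ebd6c0fe-5181-41be-a80c-55b3b3d0841d",
    "address": "fa:16:3e:3f:d5:58",
    "devname": "tapebd6c0fe-51",
    "ovs_interfaceid": "ebd6c0fe-5181-41be-a80c-55b3b3d0841d",
    "network": {
        "id": "448d5fe0-7468-4676-88e3-27fbde04f612",
        "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.14", "type": "fixed"}]}],
    },
}
print(summarize_vif(vif))
# {'port_id': 'ebd6c0fe-...', 'mac': 'fa:16:3e:3f:d5:58', 'bridge': 'br-int',
#  'fixed_ips': ['192.168.128.14'], ...}
```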
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.711125] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Created linked-clone VM from snapshot [ 1358.711444] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7437fff-e04e-4525-af8b-901d9964727a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.718632] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Uploading image eda5f6cb-607d-4d11-8755-4520d23c5b9b {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1358.759355] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1358.759355] env[62525]: value = "vm-369634" [ 1358.759355] env[62525]: _type = "VirtualMachine" [ 1358.759355] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1358.759644] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-86818272-feb8-463a-afa2-c4d6419735c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.766439] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease: (returnval){ [ 1358.766439] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e7da46-85a1-6a29-7d4a-d6aa76a0cb3c" [ 1358.766439] env[62525]: _type = "HttpNfcLease" [ 1358.766439] env[62525]: } obtained for exporting VM: (result){ [ 1358.766439] env[62525]: value = "vm-369634" [ 1358.766439] env[62525]: _type = "VirtualMachine" [ 1358.766439] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1358.766765] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the lease: (returnval){ [ 1358.766765] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e7da46-85a1-6a29-7d4a-d6aa76a0cb3c" [ 1358.766765] env[62525]: _type = "HttpNfcLease" [ 1358.766765] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1358.773161] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1358.773161] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e7da46-85a1-6a29-7d4a-d6aa76a0cb3c" [ 1358.773161] env[62525]: _type = "HttpNfcLease" [ 1358.773161] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1358.799375] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780965, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.976060] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.998181] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540789} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.998181] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b/2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1358.998181] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1358.998181] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ade8d9e-850c-4f2a-9ad6-dc8c639363c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.004305] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1359.004305] env[62525]: value = "task-1780967" [ 1359.004305] env[62525]: _type = "Task" [ 1359.004305] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.016109] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780967, 'name': ExtendVirtualDisk_Task} progress is 0%. 
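Note: the "Extending root virtual disk to 1048576" message above is the flavor's 1 GB root disk expressed in KB, the unit VirtualDiskManager.ExtendVirtualDisk_Task expects (newCapacityKb); it lines up with the DISK_GB: 1 allocations in the resource-tracker lines.

```python
root_gb = 1                              # flavor root disk, matches DISK_GB: 1 above
new_capacity_kb = root_gb * 1024 * 1024  # ExtendVirtualDisk_Task works in KB
print(new_capacity_kb)                   # 1048576, the value in the log message
```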
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.079057] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1359.094450] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "refresh_cache-9dfb7d7f-6656-46fd-969e-c692db1ce507" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.094807] env[62525]: DEBUG nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Instance network_info: |[{"id": "ebd6c0fe-5181-41be-a80c-55b3b3d0841d", "address": "fa:16:3e:3f:d5:58", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebd6c0fe-51", "ovs_interfaceid": "ebd6c0fe-5181-41be-a80c-55b3b3d0841d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1359.095208] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:d5:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd72ef32-a57c-43b0-93df-e8a030987d44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebd6c0fe-5181-41be-a80c-55b3b3d0841d', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1359.102831] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Creating folder: Project (ba1be24793ee4d83babc07ff8ad5abad). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1359.103529] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e238991-ff55-4e70-837d-ec088064298f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.113149] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Created folder: Project (ba1be24793ee4d83babc07ff8ad5abad) in parent group-v369553. [ 1359.113343] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Creating folder: Instances. Parent ref: group-v369635. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1359.113560] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-057896fe-da3a-4028-a494-2a05316b4611 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.122010] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Created folder: Instances in parent group-v369635. [ 1359.122241] env[62525]: DEBUG oslo.service.loopingcall [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1359.122414] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1359.122597] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10290891-f2dc-4e99-8438-de47a3638f4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.140931] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1359.140931] env[62525]: value = "task-1780970" [ 1359.140931] env[62525]: _type = "Task" [ 1359.140931] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.144576] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1359.144821] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1abcbde7-5f08-451b-afb8-ee56c9986cb8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.152507] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780970, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.153625] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1359.153625] env[62525]: value = "task-1780971" [ 1359.153625] env[62525]: _type = "Task" [ 1359.153625] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.160866] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780971, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.275099] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1359.275099] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e7da46-85a1-6a29-7d4a-d6aa76a0cb3c" [ 1359.275099] env[62525]: _type = "HttpNfcLease" [ 1359.275099] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1359.275469] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1359.275469] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e7da46-85a1-6a29-7d4a-d6aa76a0cb3c" [ 1359.275469] env[62525]: _type = "HttpNfcLease" [ 1359.275469] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1359.276248] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58946c42-b3c1-480d-b8d1-0c0ae301ead1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.283539] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ece79e-03b5-1dc4-a033-ee5003f02fbb/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1359.283716] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ece79e-03b5-1dc4-a033-ee5003f02fbb/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1359.350826] env[62525]: DEBUG oslo_vmware.api [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780965, 'name': PowerOnVM_Task, 'duration_secs': 0.600087} completed successfully. 
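Note: once the HttpNfcLease above reports ready, the image export is essentially an authenticated HTTPS read of the per-disk URL the lease exposes ("Found VMDK URL ... Opening URL ... for reading"); oslo.vmware's rw_handles module wraps this with its own connection handling and lease keep-alive. Below is a bare-bones illustration of streaming such a URL with requests; the cookie and TLS settings are assumptions, and a real export also has to report progress to keep the lease alive (the HttpNfcLeaseProgress calls visible further down in the log).

```python
import requests

# Illustrative only: stream a VMDK exposed by an HttpNfcLease. Session cookie
# and certificate verification are assumptions; oslo.vmware's rw_handles
# handles these details (plus lease progress/keep-alive) itself.

def download_exported_disk(vmdk_url, dest_path, cookies=None, chunk_size=1 << 20):
    with requests.get(vmdk_url, stream=True, cookies=cookies, verify=False) as resp:
        resp.raise_for_status()
        with open(dest_path, "wb") as out:
            for chunk in resp.iter_content(chunk_size=chunk_size):
                out.write(chunk)

# download_exported_disk(
#     "https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ece79e.../disk-0.vmdk",
#     "disk-0.vmdk",
# )
```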
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.350826] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1359.350826] env[62525]: INFO nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Took 9.69 seconds to spawn the instance on the hypervisor. [ 1359.350826] env[62525]: DEBUG nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1359.351471] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83517664-ad8c-4ad3-b421-1fc33af08b37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.373699] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-edb86c10-013e-41a2-ac50-9425f3d6c30c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.513900] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.253908} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.514180] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1359.514945] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a452989a-5f51-4cdd-9c76-08a1b248dbea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.538220] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b/2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1359.538220] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-554c0aa0-58b7-4be5-8afd-03a69c79dea3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.556564] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1359.556564] env[62525]: value = "task-1780972" [ 1359.556564] env[62525]: _type = "Task" [ 1359.556564] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.567881] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780972, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.581498] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1359.651940] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780970, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.662908] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780971, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.870352] env[62525]: INFO nova.compute.manager [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Took 36.23 seconds to build instance. [ 1360.069047] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780972, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.086108] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f2240974-0fa4-4f59-ae0c-b9da52f9600e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.152990] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780970, 'name': CreateVM_Task, 'duration_secs': 0.660508} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.152990] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1360.153648] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.153903] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.154167] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1360.154478] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b01bbeea-2d49-4331-9ab1-3cf910ab9325 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.162503] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 
1360.162503] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52057e39-8eff-cc00-d579-4aed6ad962dd" [ 1360.162503] env[62525]: _type = "Task" [ 1360.162503] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.166121] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780971, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.174236] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52057e39-8eff-cc00-d579-4aed6ad962dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.269053] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "e34ebddc-2192-4975-81d7-0f5c200f114e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.372805] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35c3ca52-7756-4305-891e-d7789c246f29 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.358s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.375664] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.106s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.375664] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "e34ebddc-2192-4975-81d7-0f5c200f114e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.375664] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.375664] 
env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.378271] env[62525]: INFO nova.compute.manager [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Terminating instance [ 1360.381795] env[62525]: DEBUG nova.compute.manager [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1360.382427] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1360.383352] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0cec29-5324-4f19-933a-5dfa00996203 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.392103] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1360.392712] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d69920e9-7489-49c2-ade5-f45aedcedcd9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.403300] env[62525]: DEBUG oslo_vmware.api [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1360.403300] env[62525]: value = "task-1780973" [ 1360.403300] env[62525]: _type = "Task" [ 1360.403300] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.413785] env[62525]: DEBUG oslo_vmware.api [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780973, 'name': PowerOffVM_Task} progress is 0%. 
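Note: the "Acquiring lock ... / Lock ... acquired ... / released" messages above (around do_terminate_instance and the per-instance "-events" lock) come from oslo.concurrency's named locks. In application code the same pattern is usually written with the lockutils.synchronized decorator or the lock() context manager; a minimal sketch follows, with placeholder lock names rather than Nova's real ones.

```python
from oslo_concurrency import lockutils

# Minimal sketch of the named-lock pattern behind the acquire/release
# messages above. The lock names here are placeholders.

@lockutils.synchronized("demo-instance-uuid")
def terminate_instance():
    # Runs with the named lock held, like do_terminate_instance in the log.
    print("terminating while holding the lock")

def clear_events():
    # Context-manager form, convenient for shorter critical sections.
    with lockutils.lock("demo-instance-uuid-events"):
        print("clearing events while holding the lock")

terminate_instance()
clear_events()
```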
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.568302] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780972, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.589433] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 3455a540-7fbc-46ba-b7d6-84a345c0463e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.665575] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780971, 'name': CreateSnapshot_Task, 'duration_secs': 1.018517} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.665985] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1360.667358] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2c0028-ab79-469f-9ea8-22b09fa406e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.683791] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52057e39-8eff-cc00-d579-4aed6ad962dd, 'name': SearchDatastore_Task, 'duration_secs': 0.011238} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.684087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.684360] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1360.684637] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1360.684848] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.685075] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.685586] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7560e88f-a11c-48bf-aefc-db1c18abdf60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.694114] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.694333] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1360.695067] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeb8ab76-7c94-4cc0-b58f-ff7039a81334 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.701945] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1360.701945] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523d96c6-dc23-22bf-1fbf-5d88392f88f1" [ 1360.701945] env[62525]: _type = "Task" [ 1360.701945] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.710287] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523d96c6-dc23-22bf-1fbf-5d88392f88f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.876261] env[62525]: DEBUG nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1360.912260] env[62525]: DEBUG oslo_vmware.api [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780973, 'name': PowerOffVM_Task, 'duration_secs': 0.21587} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.912485] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1360.912649] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1360.912896] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8031c917-8475-4ace-ac44-d6771c46da99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.993035] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1360.993254] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1360.993439] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Deleting the datastore file [datastore1] e34ebddc-2192-4975-81d7-0f5c200f114e {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.993697] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c8d8f96-9338-44b9-9917-45c6c801ff26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.000849] env[62525]: DEBUG oslo_vmware.api [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for the task: (returnval){ [ 1361.000849] env[62525]: value = "task-1780975" [ 1361.000849] env[62525]: _type = "Task" [ 1361.000849] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.011647] env[62525]: DEBUG oslo_vmware.api [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780975, 'name': DeleteDatastoreFile_Task} progress is 0%. 
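Note: the teardown of instance e34ebddc above follows a fixed order — power off the VM, unregister it from vCenter, delete its directory from the datastore, and only then deallocate the network (the deallocation shows up a little later in the log). A schematic outline of that ordering is sketched below; each step is a placeholder callable, not Nova's vmwareapi code.

```python
# Schematic outline of the destroy ordering visible in the log; the step
# functions are placeholders supplied by the caller.

def destroy_instance(power_off, unregister, delete_datastore_dir, deallocate_network):
    power_off()              # PowerOffVM_Task
    unregister()             # VirtualMachine.UnregisterVM
    delete_datastore_dir()   # FileManager.DeleteDatastoreFile_Task on the VM folder
    deallocate_network()     # neutron deallocate_for_instance()

destroy_instance(
    power_off=lambda: print("powered off"),
    unregister=lambda: print("unregistered"),
    delete_datastore_dir=lambda: print("datastore contents deleted"),
    deallocate_network=lambda: print("network deallocated"),
)
```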
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.068932] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780972, 'name': ReconfigVM_Task, 'duration_secs': 1.371769} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.069248] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b/2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.069947] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36f4946c-033d-425b-a006-7faecf99ed2c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.076378] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1361.076378] env[62525]: value = "task-1780976" [ 1361.076378] env[62525]: _type = "Task" [ 1361.076378] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.085153] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780976, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.092090] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1361.194101] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1361.194483] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fff18d4b-98c6-4bd1-9675-6233b95cdd81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.203801] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1361.203801] env[62525]: value = "task-1780977" [ 1361.203801] env[62525]: _type = "Task" [ 1361.203801] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.215456] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523d96c6-dc23-22bf-1fbf-5d88392f88f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009429} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.219445] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780977, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.219715] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c3f1231-a60f-46e8-b7b3-18b5764be538 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.225033] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1361.225033] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5208eea0-6107-a4a3-a3f1-35d4ad362f21" [ 1361.225033] env[62525]: _type = "Task" [ 1361.225033] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.233337] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5208eea0-6107-a4a3-a3f1-35d4ad362f21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.407356] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.513519] env[62525]: DEBUG oslo_vmware.api [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Task: {'id': task-1780975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24954} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.513840] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.514090] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1361.514328] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1361.514583] env[62525]: INFO nova.compute.manager [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1361.514880] env[62525]: DEBUG oslo.service.loopingcall [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1361.515124] env[62525]: DEBUG nova.compute.manager [-] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1361.515687] env[62525]: DEBUG nova.network.neutron [-] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1361.588502] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780976, 'name': Rename_Task, 'duration_secs': 0.245847} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.591502] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.591845] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dc8158b-0724-4cc1-8afd-74d26ba52f9e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.595068] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6e9051e9-aa89-408f-8f62-533085dc1312 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1361.598818] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1361.598818] env[62525]: value = "task-1780978" [ 1361.598818] env[62525]: _type = "Task" [ 1361.598818] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.607475] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.717815] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780977, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.735713] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5208eea0-6107-a4a3-a3f1-35d4ad362f21, 'name': SearchDatastore_Task, 'duration_secs': 0.01149} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.736043] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.736372] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9dfb7d7f-6656-46fd-969e-c692db1ce507/9dfb7d7f-6656-46fd-969e-c692db1ce507.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1361.736669] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-034ae382-efa1-4643-999e-eeffbafe79e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.743414] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1361.743414] env[62525]: value = "task-1780979" [ 1361.743414] env[62525]: _type = "Task" [ 1361.743414] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.752145] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780979, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.882052] env[62525]: DEBUG nova.compute.manager [req-e91ad977-296c-42cd-b489-f4c2b059f2a4 req-10b5ee00-0117-4c11-9778-7a8145340b7c service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Received event network-vif-deleted-945dca79-be4e-4671-b11c-271f1a6db036 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.882379] env[62525]: INFO nova.compute.manager [req-e91ad977-296c-42cd-b489-f4c2b059f2a4 req-10b5ee00-0117-4c11-9778-7a8145340b7c service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Neutron deleted interface 945dca79-be4e-4671-b11c-271f1a6db036; detaching it from the instance and deleting it from the info cache [ 1361.882553] env[62525]: DEBUG nova.network.neutron [req-e91ad977-296c-42cd-b489-f4c2b059f2a4 req-10b5ee00-0117-4c11-9778-7a8145340b7c service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.098333] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2f589dc1-9244-475f-86d0-4b69b511508b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1362.110936] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780978, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.223353] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780977, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.255638] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780979, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.360290] env[62525]: DEBUG nova.network.neutron [-] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.385756] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3e87233-bb28-4998-be09-b8a595457196 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.396041] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2e2c7b-e0ee-43cc-a7ab-4b1cd5b34291 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.429287] env[62525]: DEBUG nova.compute.manager [req-e91ad977-296c-42cd-b489-f4c2b059f2a4 req-10b5ee00-0117-4c11-9778-7a8145340b7c service nova] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Detach interface failed, port_id=945dca79-be4e-4671-b11c-271f1a6db036, reason: Instance e34ebddc-2192-4975-81d7-0f5c200f114e could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1362.604666] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance d2e7c558-02af-477c-b996-239ef14ed75b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1362.612772] env[62525]: DEBUG oslo_vmware.api [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780978, 'name': PowerOnVM_Task, 'duration_secs': 0.586791} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.613082] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.613364] env[62525]: INFO nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Took 10.10 seconds to spawn the instance on the hypervisor. 
[ 1362.613675] env[62525]: DEBUG nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1362.614477] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69af20fd-2730-43b1-913a-82b2c2a75f5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.725181] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780977, 'name': CloneVM_Task} progress is 95%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.759726] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780979, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555957} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.759977] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9dfb7d7f-6656-46fd-969e-c692db1ce507/9dfb7d7f-6656-46fd-969e-c692db1ce507.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1362.760148] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1362.760441] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6fa542a-4ae2-4643-b006-04a2241c2da9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.768660] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1362.768660] env[62525]: value = "task-1780980" [ 1362.768660] env[62525]: _type = "Task" [ 1362.768660] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.779494] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780980, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.862714] env[62525]: INFO nova.compute.manager [-] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Took 1.35 seconds to deallocate network for instance. [ 1363.109344] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 1f3792c0-9f86-4d76-a1a6-28d492869046 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1363.130194] env[62525]: INFO nova.compute.manager [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Took 32.63 seconds to build instance. [ 1363.219794] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780977, 'name': CloneVM_Task, 'duration_secs': 1.656535} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.220070] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Created linked-clone VM from snapshot [ 1363.220789] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a7cfce-3243-4a57-8271-4f3b532be23a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.227982] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Uploading image c6e80714-1db7-4d68-ac41-54718eff899f {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1363.267116] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1363.267116] env[62525]: value = "vm-369639" [ 1363.267116] env[62525]: _type = "VirtualMachine" [ 1363.267116] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1363.267534] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-88a87842-b4c0-4393-8bb1-70df73e4dbd5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.280337] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780980, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139142} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.282021] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1363.282144] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lease: (returnval){ [ 1363.282144] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52feb18d-767e-b602-2cad-dde67e6b4d40" [ 1363.282144] env[62525]: _type = "HttpNfcLease" [ 1363.282144] env[62525]: } obtained for exporting VM: (result){ [ 1363.282144] env[62525]: value = "vm-369639" [ 1363.282144] env[62525]: _type = "VirtualMachine" [ 1363.282144] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1363.282359] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the lease: (returnval){ [ 1363.282359] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52feb18d-767e-b602-2cad-dde67e6b4d40" [ 1363.282359] env[62525]: _type = "HttpNfcLease" [ 1363.282359] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1363.284050] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b643d9-2470-43d4-aee4-e6463e4be656 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.310053] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 9dfb7d7f-6656-46fd-969e-c692db1ce507/9dfb7d7f-6656-46fd-969e-c692db1ce507.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1363.311762] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a63960ef-b085-417d-bf21-9d10b4df2610 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.326543] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1363.326543] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52feb18d-767e-b602-2cad-dde67e6b4d40" [ 1363.326543] env[62525]: _type = "HttpNfcLease" [ 1363.326543] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1363.331679] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1363.331679] env[62525]: value = "task-1780982" [ 1363.331679] env[62525]: _type = "Task" [ 1363.331679] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.339750] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780982, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.369057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.614491] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e8864d73-35e6-490b-a07c-e8cac8baf880 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1363.633047] env[62525]: DEBUG oslo_concurrency.lockutils [None req-200c8b72-1aa7-4d63-a163-4ad1e736285a tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.924s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.794498] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1363.794498] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52feb18d-767e-b602-2cad-dde67e6b4d40" [ 1363.794498] env[62525]: _type = "HttpNfcLease" [ 1363.794498] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1363.794900] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1363.794900] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52feb18d-767e-b602-2cad-dde67e6b4d40" [ 1363.794900] env[62525]: _type = "HttpNfcLease" [ 1363.794900] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1363.795721] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb1b234-7b21-40ee-b82d-2ddbe6d14614 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.803948] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a7b8c4-7919-c27c-5057-261fc166df3e/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1363.804572] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a7b8c4-7919-c27c-5057-261fc166df3e/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1363.875329] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780982, 'name': ReconfigVM_Task, 'duration_secs': 0.276273} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.875679] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 9dfb7d7f-6656-46fd-969e-c692db1ce507/9dfb7d7f-6656-46fd-969e-c692db1ce507.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1363.876563] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9719f85-2e70-4812-a9fb-8aca16b1c9b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.883238] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1363.883238] env[62525]: value = "task-1780983" [ 1363.883238] env[62525]: _type = "Task" [ 1363.883238] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.893525] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780983, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.901427] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-87717bf0-47bb-40c8-89d8-f0f3358d1557 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.118233] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1364.118783] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1364.118985] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1364.136503] env[62525]: DEBUG nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1364.398634] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780983, 'name': Rename_Task, 'duration_secs': 0.149851} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.399057] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1364.399433] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6dc8584f-4931-4121-b001-597600c0d7cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.410599] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1364.410599] env[62525]: value = "task-1780984" [ 1364.410599] env[62525]: _type = "Task" [ 1364.410599] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.422899] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780984, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.669463] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.768128] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10731e2-1b04-41c0-9c13-eb32d38ddbc7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.779682] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eca963f-5f5a-47c9-8f74-dd10232bc808 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.824341] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df195de5-3783-4a44-a668-f8db0f3ce734 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.832707] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9124b1-2f9c-497b-a78e-f3eaeee3fdcc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.851709] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.922555] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780984, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.354974] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1365.421801] env[62525]: DEBUG oslo_vmware.api [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780984, 'name': PowerOnVM_Task, 'duration_secs': 0.588288} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.422124] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1365.422342] env[62525]: INFO nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Took 10.09 seconds to spawn the instance on the hypervisor. 
[ 1365.422602] env[62525]: DEBUG nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1365.423587] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07b386e-22d0-471c-a2ad-0cae746341e9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.860475] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1365.860855] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.349s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.861158] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.607s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.862772] env[62525]: INFO nova.compute.claims [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1365.942866] env[62525]: INFO nova.compute.manager [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Took 34.81 seconds to build instance. 
[ 1366.445343] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f826bc2b-f0f0-48a1-802b-f973866007ca tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "9dfb7d7f-6656-46fd-969e-c692db1ce507" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.145s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.862547] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1366.862883] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1366.948562] env[62525]: DEBUG nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1367.352535] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983b2b8c-8950-4095-ad0d-78f79fc984c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.360845] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5881d9a4-f54d-465f-bf47-8b6bfe52c2d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.397452] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043709c6-7951-4268-83b5-f52513f3712e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.400228] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.400460] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1367.400579] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1367.407293] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4ba946-5878-4fb3-a356-6968d8e93e45 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.421261] env[62525]: DEBUG nova.compute.provider_tree [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] 
Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.468650] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.909575] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Skipping network cache update for instance because it is being deleted. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1367.909928] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Skipping network cache update for instance because it is Building. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1367.924388] env[62525]: DEBUG nova.scheduler.client.report [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1367.951165] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.951408] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.951623] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1367.951813] env[62525]: DEBUG nova.objects.instance [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lazy-loading 'info_cache' on Instance uuid 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.099462] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ece79e-03b5-1dc4-a033-ee5003f02fbb/disk-0.vmdk. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1368.100218] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03c9791-f4e2-4dc0-9444-3f9ed427e74e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.106918] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ece79e-03b5-1dc4-a033-ee5003f02fbb/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1368.107192] env[62525]: ERROR oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ece79e-03b5-1dc4-a033-ee5003f02fbb/disk-0.vmdk due to incomplete transfer. [ 1368.107456] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-23c406b6-b5cc-4a56-9ba8-65c82027d43e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.116148] env[62525]: DEBUG oslo_vmware.rw_handles [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ece79e-03b5-1dc4-a033-ee5003f02fbb/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1368.116365] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Uploaded image eda5f6cb-607d-4d11-8755-4520d23c5b9b to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1368.118654] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1368.118924] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2d0c115a-68f9-4b34-be95-94eda2160409 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.125511] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1368.125511] env[62525]: value = "task-1780985" [ 1368.125511] env[62525]: _type = "Task" [ 1368.125511] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.133630] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780985, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.431017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.431576] env[62525]: DEBUG nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1368.434279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.432s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.435935] env[62525]: INFO nova.compute.claims [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.636210] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780985, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.941173] env[62525]: DEBUG nova.compute.utils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1368.945723] env[62525]: DEBUG nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1368.945923] env[62525]: DEBUG nova.network.neutron [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1369.038054] env[62525]: DEBUG nova.policy [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '531de4ca348f4dd3aebf5d55b44a144d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9938a0e97324286b6c68b4371c911c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1369.139089] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780985, 'name': Destroy_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.353912] env[62525]: DEBUG nova.network.neutron [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Successfully created port: 9c51564d-d867-419a-93d9-ccf23c27f990 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1369.446111] env[62525]: DEBUG nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1369.643770] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780985, 'name': Destroy_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.654017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "84fbb408-7810-4166-a53e-242d51f60322" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.654017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.654017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "84fbb408-7810-4166-a53e-242d51f60322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.654017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.654301] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.654819] env[62525]: INFO nova.compute.manager [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Terminating instance [ 1369.656683] env[62525]: DEBUG nova.compute.manager [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1369.657387] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1369.658804] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65a1a14-c060-42f6-af9b-905e970d6a2d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.669151] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1369.669398] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-160e6d62-3d26-482d-b42f-bccf2e10c762 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.745115] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1369.745115] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1369.745115] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleting the datastore file [datastore1] 84fbb408-7810-4166-a53e-242d51f60322 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1369.745402] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b61e103-9b32-49dc-83ea-68b30a3d829f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.753281] env[62525]: DEBUG oslo_vmware.api [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1369.753281] env[62525]: value = "task-1780987" [ 1369.753281] env[62525]: _type = "Task" [ 1369.753281] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.764000] env[62525]: DEBUG oslo_vmware.api [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.809516] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.087626] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c4b43a-3d4a-4001-89e9-6591aee5662c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.095297] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.095592] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.095861] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.096044] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.096258] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.099284] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51688865-67c0-4c30-ad83-6bb526adc5d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.103272] env[62525]: INFO nova.compute.manager [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Terminating instance [ 1370.105439] env[62525]: DEBUG nova.compute.manager [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1370.105695] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1370.106840] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc17d68-02cd-4b9b-991f-c65ecc4167c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.142307] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48808c08-377c-4a75-b490-c4e1b4995f06 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.149943] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.151534] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dda390bf-78db-4978-aa65-9bc3f16d8fd5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.160764] env[62525]: DEBUG oslo_vmware.api [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1780985, 'name': Destroy_Task, 'duration_secs': 1.814961} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.163160] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b10da0-8be0-4699-b064-cb0cb3e7f834 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.171031] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Destroyed the VM [ 1370.171031] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1370.171031] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a43b5a0a-21f5-485e-bfca-96d968df6d5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.172274] env[62525]: DEBUG oslo_vmware.api [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1370.172274] env[62525]: value = "task-1780988" [ 1370.172274] env[62525]: _type = "Task" [ 1370.172274] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.192984] env[62525]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1370.192984] env[62525]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62525) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1370.192984] env[62525]: DEBUG nova.compute.provider_tree [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.193738] env[62525]: DEBUG nova.compute.utils [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Cleaning up image eda5f6cb-607d-4d11-8755-4520d23c5b9b {{(pid=62525) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 1370.203398] env[62525]: DEBUG oslo_vmware.api [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780988, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.265700] env[62525]: WARNING nova.virt.vmwareapi.vmops [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 84fbb408-7810-4166-a53e-242d51f60322 [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] Traceback (most recent call last): [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1113, in _destroy_instance [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] ds_util.file_delete(self._session, [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] session._wait_for_task(file_delete_task) [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] return self.wait_for_task(task_ref) [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] return evt.wait() [ 1370.265700] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] result = hub.switch() [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] return self.greenlet.switch() [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] self.f(*self.args, **self.kw) [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] raise exceptions.translate_fault(task_info.error) [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] 
oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 84fbb408-7810-4166-a53e-242d51f60322 [ 1370.266195] env[62525]: ERROR nova.virt.vmwareapi.vmops [instance: 84fbb408-7810-4166-a53e-242d51f60322] [ 1370.266195] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1370.266487] env[62525]: INFO nova.compute.manager [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1370.266487] env[62525]: DEBUG oslo.service.loopingcall [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.266757] env[62525]: DEBUG nova.compute.manager [-] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1370.266925] env[62525]: DEBUG nova.network.neutron [-] [instance: 84fbb408-7810-4166-a53e-242d51f60322] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1370.312370] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.312596] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1370.312879] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.313220] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.313444] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.313724] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.313946] env[62525]: DEBUG oslo_service.periodic_task [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.392442] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.392694] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.461872] env[62525]: DEBUG nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1370.501617] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1370.501867] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1370.502034] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1370.502222] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1370.502367] 
env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1370.502517] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1370.502997] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1370.502997] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1370.503144] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1370.503469] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1370.503469] env[62525]: DEBUG nova.virt.hardware [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1370.504689] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d66c3d-c058-4456-a152-656dbbe7ba4b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.514980] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9d34ae-de79-4774-8e34-9687d20b462e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.684388] env[62525]: DEBUG oslo_vmware.api [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780988, 'name': PowerOffVM_Task, 'duration_secs': 0.187354} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.685026] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1370.685026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1370.686380] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86e8df80-789c-4318-92ac-6722cded2da5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.696705] env[62525]: DEBUG nova.scheduler.client.report [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1370.702159] env[62525]: DEBUG nova.compute.manager [req-8af6fed4-99c7-4057-9f88-ff3e96b6f013 req-5611a9de-e6e2-4971-8f5c-71868685e1d4 service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Received event network-vif-deleted-cf67116f-5315-4381-89ee-63b52941270b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1370.702272] env[62525]: INFO nova.compute.manager [req-8af6fed4-99c7-4057-9f88-ff3e96b6f013 req-5611a9de-e6e2-4971-8f5c-71868685e1d4 service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Neutron deleted interface cf67116f-5315-4381-89ee-63b52941270b; detaching it from the instance and deleting it from the info cache [ 1370.702427] env[62525]: DEBUG nova.network.neutron [req-8af6fed4-99c7-4057-9f88-ff3e96b6f013 req-5611a9de-e6e2-4971-8f5c-71868685e1d4 service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.809970] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1370.809970] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Deleting contents of the VM from datastore datastore1 
{{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1370.809970] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Deleting the datastore file [datastore1] 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1370.810251] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98e6d629-7069-46e1-b34e-e8a195f3f208 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.818720] env[62525]: DEBUG oslo_vmware.api [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for the task: (returnval){ [ 1370.818720] env[62525]: value = "task-1780990" [ 1370.818720] env[62525]: _type = "Task" [ 1370.818720] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.825936] env[62525]: DEBUG oslo_vmware.api [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.969074] env[62525]: DEBUG nova.network.neutron [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Successfully updated port: 9c51564d-d867-419a-93d9-ccf23c27f990 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1371.106720] env[62525]: DEBUG nova.network.neutron [-] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.204914] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.205560] env[62525]: DEBUG nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1371.208854] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.169s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.209078] env[62525]: DEBUG nova.objects.instance [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1371.211833] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-420e5347-652b-4abf-a4ec-77d7e3857a14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.222820] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d71988-0c9e-4dd4-8c3f-dfc67e8053ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.257012] env[62525]: DEBUG nova.compute.manager [req-8af6fed4-99c7-4057-9f88-ff3e96b6f013 req-5611a9de-e6e2-4971-8f5c-71868685e1d4 service nova] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Detach interface failed, port_id=cf67116f-5315-4381-89ee-63b52941270b, reason: Instance 84fbb408-7810-4166-a53e-242d51f60322 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1371.325917] env[62525]: DEBUG oslo_vmware.api [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.473917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "refresh_cache-cfae9bf8-012a-4286-b978-bba8a913bba2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.473917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquired lock "refresh_cache-cfae9bf8-012a-4286-b978-bba8a913bba2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.473917] env[62525]: DEBUG nova.network.neutron [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1371.609218] env[62525]: INFO nova.compute.manager [-] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Took 1.34 seconds to deallocate network for instance. 
[ 1371.710327] env[62525]: DEBUG nova.compute.utils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1371.711709] env[62525]: DEBUG nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1371.712481] env[62525]: DEBUG nova.network.neutron [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1371.737962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.770890] env[62525]: DEBUG nova.policy [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca1e5b713111400498f43405ca8123dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '071a34f637574e2ea213429903097ee7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1371.829273] env[62525]: DEBUG oslo_vmware.api [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Task: {'id': task-1780990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.550822} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.829454] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1371.829608] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1371.829667] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1371.829833] env[62525]: INFO nova.compute.manager [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1371.830154] env[62525]: DEBUG oslo.service.loopingcall [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1371.830376] env[62525]: DEBUG nova.compute.manager [-] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1371.830471] env[62525]: DEBUG nova.network.neutron [-] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1372.038112] env[62525]: DEBUG nova.network.neutron [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1372.115902] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.215622] env[62525]: DEBUG nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1372.221888] env[62525]: DEBUG oslo_concurrency.lockutils [None req-46be49bc-a835-47d8-95f0-0dadee34db61 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.227961] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.649s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.231638] env[62525]: INFO nova.compute.claims [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1372.362290] env[62525]: DEBUG nova.network.neutron [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Updating instance_info_cache with network_info: [{"id": "9c51564d-d867-419a-93d9-ccf23c27f990", "address": "fa:16:3e:60:5e:1a", "network": {"id": "d29d32ef-32b3-4b9e-94df-955a787a9e24", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1942764628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9938a0e97324286b6c68b4371c911c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c51564d-d8", "ovs_interfaceid": "9c51564d-d867-419a-93d9-ccf23c27f990", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.369306] env[62525]: DEBUG nova.network.neutron [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Successfully created port: 7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1372.381809] env[62525]: DEBUG nova.compute.manager [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Checking state 
{{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1372.382838] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a027db-2d0e-4ca8-99c9-79079e9f9aaf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.777606] env[62525]: DEBUG nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Received event network-vif-plugged-9c51564d-d867-419a-93d9-ccf23c27f990 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1372.777831] env[62525]: DEBUG oslo_concurrency.lockutils [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] Acquiring lock "cfae9bf8-012a-4286-b978-bba8a913bba2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.778102] env[62525]: DEBUG oslo_concurrency.lockutils [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.778263] env[62525]: DEBUG oslo_concurrency.lockutils [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.778436] env[62525]: DEBUG nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] No waiting events found dispatching network-vif-plugged-9c51564d-d867-419a-93d9-ccf23c27f990 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1372.778593] env[62525]: WARNING nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Received unexpected event network-vif-plugged-9c51564d-d867-419a-93d9-ccf23c27f990 for instance with vm_state building and task_state spawning. [ 1372.778754] env[62525]: DEBUG nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Received event network-changed-9c51564d-d867-419a-93d9-ccf23c27f990 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1372.778904] env[62525]: DEBUG nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Refreshing instance network info cache due to event network-changed-9c51564d-d867-419a-93d9-ccf23c27f990. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1372.779085] env[62525]: DEBUG oslo_concurrency.lockutils [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] Acquiring lock "refresh_cache-cfae9bf8-012a-4286-b978-bba8a913bba2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.862508] env[62525]: DEBUG nova.network.neutron [-] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.864320] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Releasing lock "refresh_cache-cfae9bf8-012a-4286-b978-bba8a913bba2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.864829] env[62525]: DEBUG nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Instance network_info: |[{"id": "9c51564d-d867-419a-93d9-ccf23c27f990", "address": "fa:16:3e:60:5e:1a", "network": {"id": "d29d32ef-32b3-4b9e-94df-955a787a9e24", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1942764628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9938a0e97324286b6c68b4371c911c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c51564d-d8", "ovs_interfaceid": "9c51564d-d867-419a-93d9-ccf23c27f990", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1372.865242] env[62525]: DEBUG oslo_concurrency.lockutils [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] Acquired lock "refresh_cache-cfae9bf8-012a-4286-b978-bba8a913bba2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.865432] env[62525]: DEBUG nova.network.neutron [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Refreshing network info cache for port 9c51564d-d867-419a-93d9-ccf23c27f990 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.867032] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Instance VIF 
info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:5e:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b00fe87c-d828-442f-bd09-e9018c468557', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c51564d-d867-419a-93d9-ccf23c27f990', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1372.874611] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Creating folder: Project (e9938a0e97324286b6c68b4371c911c2). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1372.875265] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-efc9efe9-41e3-4f3b-a80f-1e670d82c247 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.887142] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Created folder: Project (e9938a0e97324286b6c68b4371c911c2) in parent group-v369553. [ 1372.887374] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Creating folder: Instances. Parent ref: group-v369640. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1372.887611] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b488984e-ee67-4cca-ad7d-c355ab769a3f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.894277] env[62525]: INFO nova.compute.manager [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] instance snapshotting [ 1372.896835] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183615d0-9fba-4418-b917-f3146db019b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.900624] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Created folder: Instances in parent group-v369640. [ 1372.900850] env[62525]: DEBUG oslo.service.loopingcall [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
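Editor's note: the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" record above is emitted from oslo.service's loopingcall module. A hedged sketch of that polling pattern with FixedIntervalLoopingCall and LoopingCallDone follows; the polled function is a stand-in counter, not Nova's create_vm.

```python
# Hedged sketch of the oslo.service looping-call pattern referenced by the
# loopingcall.py line above. check_done() is a placeholder, not Nova code.
from oslo_service import loopingcall

_calls = {'n': 0}

def check_done():
    _calls['n'] += 1
    if _calls['n'] >= 3:                       # pretend the backend finished
        # Stops the loop and hands the value back to the waiter below.
        raise loopingcall.LoopingCallDone(retvalue='vm-ref')

timer = loopingcall.FixedIntervalLoopingCall(check_done)
value = timer.start(interval=0.5).wait()       # blocks until LoopingCallDone
print(value)                                   # 'vm-ref'
```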
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1372.901453] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1372.901656] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f187a1b-da5f-46f1-92a8-257dfc780314 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.930927] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18dc3e30-fd01-47f7-bae4-29bc07cb48ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.935461] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1372.935461] env[62525]: value = "task-1780993" [ 1372.935461] env[62525]: _type = "Task" [ 1372.935461] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.947583] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780993, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.234472] env[62525]: DEBUG nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1373.262808] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1373.263042] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1373.263282] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Image limits 0:0:0 {{(pid=62525) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1373.263517] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1373.263724] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1373.263993] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1373.264342] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1373.264580] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1373.264816] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1373.265060] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1373.265308] env[62525]: DEBUG nova.virt.hardware [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1373.265905] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.268017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b1c51f-9997-4c39-b492-8fa02a92db10 
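Editor's note: the hardware.py records above show the flavor/image limits (all 0, i.e. unconstrained) being narrowed down to the single possible topology of 1 socket x 1 core x 1 thread for this 1-vCPU flavor. A simplified, illustrative enumeration of that idea is sketched below; it is not Nova's implementation, it only reproduces the factorisation the DEBUG lines describe.

```python
# Simplified illustration of "possible CPU topologies" for a vCPU count,
# capped by maximum sockets/cores/threads. NOT Nova's code; it mirrors the
# idea behind the hardware.py DEBUG lines above.
from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(Topology(sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
```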
{{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.278048] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3506aae8-46cf-414a-9677-bfff5ebe8927 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.365113] env[62525]: INFO nova.compute.manager [-] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Took 1.53 seconds to deallocate network for instance. [ 1373.444523] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1373.448540] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d840841a-fd95-4465-938e-100530d563c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.450464] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780993, 'name': CreateVM_Task, 'duration_secs': 0.415245} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.452779] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1373.453845] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1373.454056] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.454426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1373.454974] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d5d8a0e-878d-4c26-9c3c-072ccff540be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.459994] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1373.459994] env[62525]: value = "task-1780994" [ 1373.459994] env[62525]: _type = "Task" [ 1373.459994] 
env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.467218] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1373.467218] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52425ea1-063a-92c4-21c5-aec273307dd5" [ 1373.467218] env[62525]: _type = "Task" [ 1373.467218] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.478657] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780994, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.483834] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52425ea1-063a-92c4-21c5-aec273307dd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.612513] env[62525]: DEBUG nova.network.neutron [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Updated VIF entry in instance network info cache for port 9c51564d-d867-419a-93d9-ccf23c27f990. 
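Editor's note: the "Waiting for the task: (returnval){ ... }" blocks and the repeated "progress is N%" / "completed successfully" records above are produced by oslo.vmware's task poller. A minimal sketch of driving that mechanism through a VMwareAPISession is given below; the host, credentials and the vm_ref managed-object reference are placeholders, and the keyword names are my reading of the oslo.vmware constructor rather than anything taken from this log.

```python
# Minimal sketch of the oslo.vmware task-wait pattern seen above. Host,
# credentials and vm_ref are placeholders, not values from this deployment.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # a VirtualMachine managed-object reference obtained elsewhere

# *_Task methods return a Task reference; wait_for_task() polls it (the
# "progress is N%" lines) and returns the completed task info or raises.
task_ref = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                              name='snap-1', memory=False, quiesce=False)
task_info = session.wait_for_task(task_ref)
```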
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.612862] env[62525]: DEBUG nova.network.neutron [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Updating instance_info_cache with network_info: [{"id": "9c51564d-d867-419a-93d9-ccf23c27f990", "address": "fa:16:3e:60:5e:1a", "network": {"id": "d29d32ef-32b3-4b9e-94df-955a787a9e24", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1942764628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9938a0e97324286b6c68b4371c911c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c51564d-d8", "ovs_interfaceid": "9c51564d-d867-419a-93d9-ccf23c27f990", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.773842] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.808583] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56906dd6-9586-4639-849f-584fe856b498 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.816905] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbab755d-d1ce-47fc-92f0-842b08839c34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.851040] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5250d1ab-9a00-4c99-a544-1b9a91c6e0ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.858801] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30537596-4766-43fd-96a8-208270c77609 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.873899] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.874452] env[62525]: DEBUG nova.compute.provider_tree [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 
tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.959876] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a7b8c4-7919-c27c-5057-261fc166df3e/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1373.960830] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e807830-855b-4de9-bf27-6a1275b2f665 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.975369] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a7b8c4-7919-c27c-5057-261fc166df3e/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1373.975549] env[62525]: ERROR oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a7b8c4-7919-c27c-5057-261fc166df3e/disk-0.vmdk due to incomplete transfer. [ 1373.975786] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780994, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.976352] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-68140ac3-cf56-444f-b71a-4c7da6aa32be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.981347] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52425ea1-063a-92c4-21c5-aec273307dd5, 'name': SearchDatastore_Task, 'duration_secs': 0.021437} completed successfully. 
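Editor's note: the network_info blobs carried by the cache-update records above are plain nested structures. The illustrative helper below pulls out the fields the VMware driver later needs (interface id, MAC, fixed IPs, MTU, NSX logical-switch id); the data mirrors the logged VIF for instance cfae9bf8, while summarize_vif() is my own helper, not a Nova function.

```python
# Illustrative only: extract the fields the vmwareapi driver needs from one
# VIF entry of the network_info structure logged above. The dict mirrors the
# log content; summarize_vif() is not a Nova function.
vif = {
    "id": "9c51564d-d867-419a-93d9-ccf23c27f990",
    "address": "fa:16:3e:60:5e:1a",
    "network": {
        "bridge": "br-int",
        "subnets": [{"ips": [{"address": "192.168.128.9", "version": 4}]}],
        "meta": {"mtu": 8950},
    },
    "details": {"nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557"},
}

def summarize_vif(vif):
    ips = [ip["address"] for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return {
        "iface_id": vif["id"],
        "mac_address": vif["address"],
        "fixed_ips": ips,
        "mtu": vif["network"]["meta"]["mtu"],
        "nsx_switch": vif["details"]["nsx-logical-switch-id"],
    }

print(summarize_vif(vif))
```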
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.981638] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.981872] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1373.982137] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1373.982285] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.982457] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1373.983128] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-911d7a2f-1ede-4667-b836-aa3a1a803542 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.985309] env[62525]: DEBUG oslo_vmware.rw_handles [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a7b8c4-7919-c27c-5057-261fc166df3e/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1373.985490] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Uploaded image c6e80714-1db7-4d68-ac41-54718eff899f to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1373.986943] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1373.987535] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-17654133-5bb4-4e74-8e60-336ce11dd250 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.992860] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1373.993063] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1373.994647] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-004b93e3-f0c4-4c80-b835-dcfde31e521e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.998636] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1373.998636] env[62525]: value = "task-1780995" [ 1373.998636] env[62525]: _type = "Task" [ 1373.998636] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.003050] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1374.003050] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cc050d-8223-0543-6384-ee743b698604" [ 1374.003050] env[62525]: _type = "Task" [ 1374.003050] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.008995] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780995, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.013495] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cc050d-8223-0543-6384-ee743b698604, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.108500] env[62525]: DEBUG nova.network.neutron [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Successfully updated port: 7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1374.116978] env[62525]: DEBUG oslo_concurrency.lockutils [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] Releasing lock "refresh_cache-cfae9bf8-012a-4286-b978-bba8a913bba2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.116978] env[62525]: DEBUG nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Received event network-vif-deleted-c02917b8-9b2b-4fc8-a054-7011866a8326 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.116978] env[62525]: INFO nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Neutron deleted interface c02917b8-9b2b-4fc8-a054-7011866a8326; detaching it from the instance and deleting it from the info cache [ 1374.117222] env[62525]: DEBUG nova.network.neutron [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.377900] env[62525]: DEBUG nova.scheduler.client.report [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1374.474103] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780994, 'name': CreateSnapshot_Task, 'duration_secs': 0.699663} completed successfully. 
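Editor's note: the "Inventory has not changed for provider ..." record above carries the resource provider's inventory. The schedulable capacity Placement derives from each record is (total - reserved) * allocation_ratio, which with the values logged here works out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A worked check:

```python
# Worked check of the capacity implied by the inventory logged above:
# usable capacity per resource class = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```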
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.474387] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1374.475145] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c700a5bf-db6c-405c-a8ce-c6584c6877e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.506626] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780995, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.514747] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cc050d-8223-0543-6384-ee743b698604, 'name': SearchDatastore_Task, 'duration_secs': 0.033273} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.516100] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-211ea141-ea8b-47cd-90e5-c0ca7b58050a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.520207] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1374.520207] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52967ca8-242d-6032-537e-e1a5821efc83" [ 1374.520207] env[62525]: _type = "Task" [ 1374.520207] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.528059] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52967ca8-242d-6032-537e-e1a5821efc83, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.615063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.615063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquired lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.615063] env[62525]: DEBUG nova.network.neutron [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.623459] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90b36997-fe3a-429e-9373-a9c7ab49b695 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.632867] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7625ae8-0827-48d8-837d-5a6b87c95eeb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.665274] env[62525]: DEBUG nova.compute.manager [req-7d739ee4-bf94-4fa3-a64b-3483f1a2eb0b req-2ba09a08-1329-4151-9246-d571d7489371 service nova] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Detach interface failed, port_id=c02917b8-9b2b-4fc8-a054-7011866a8326, reason: Instance 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1374.808095] env[62525]: DEBUG nova.compute.manager [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Received event network-vif-plugged-7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.808336] env[62525]: DEBUG oslo_concurrency.lockutils [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] Acquiring lock "e3f3fc2c-0060-4521-8aa3-da37209aee81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.808539] env[62525]: DEBUG oslo_concurrency.lockutils [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.808702] env[62525]: DEBUG oslo_concurrency.lockutils [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.808869] env[62525]: DEBUG nova.compute.manager [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] No waiting events found dispatching network-vif-plugged-7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1374.809043] env[62525]: WARNING nova.compute.manager [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Received unexpected event network-vif-plugged-7f5989b4-cf62-411c-9e0e-1bcbb8f37713 for instance with vm_state building and task_state spawning. [ 1374.809209] env[62525]: DEBUG nova.compute.manager [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Received event network-changed-7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.809432] env[62525]: DEBUG nova.compute.manager [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Refreshing instance network info cache due to event network-changed-7f5989b4-cf62-411c-9e0e-1bcbb8f37713. 
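Editor's note: the network-vif-plugged / network-changed / network-vif-deleted events handled above reach Nova from Neutron through the os-server-external-events API. Below is a sketch of the kind of payload involved; the event shape matches the public API, the UUIDs are taken from the records above, and the transport (endpoint, auth, client library) is out of scope.

```python
# Sketch of the request body Neutron's Nova notifier sends for events like
# those handled above (POST /v2.1/os-server-external-events). Field names
# follow the public API; UUIDs are taken from the log.
payload = {
    "events": [
        {
            "name": "network-vif-plugged",
            "server_uuid": "e3f3fc2c-0060-4521-8aa3-da37209aee81",
            "tag": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713",   # the port id
            "status": "completed",
        },
    ]
}
```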
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1374.809606] env[62525]: DEBUG oslo_concurrency.lockutils [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] Acquiring lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.883412] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.883810] env[62525]: DEBUG nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1374.886552] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.819s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.886739] env[62525]: DEBUG nova.objects.instance [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1374.992296] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1374.993099] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-366c48a2-87f8-42df-84e6-75b7b982940e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.003580] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1375.003580] env[62525]: value = "task-1780996" [ 1375.003580] env[62525]: _type = "Task" [ 1375.003580] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.010327] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780995, 'name': Destroy_Task, 'duration_secs': 0.83514} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.010593] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Destroyed the VM [ 1375.010814] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1375.011095] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-821d44b3-8777-4185-ad6f-08324623048c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.015688] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780996, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.023932] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1375.023932] env[62525]: value = "task-1780997" [ 1375.023932] env[62525]: _type = "Task" [ 1375.023932] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.031749] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52967ca8-242d-6032-537e-e1a5821efc83, 'name': SearchDatastore_Task, 'duration_secs': 0.096157} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.032875] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.033159] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] cfae9bf8-012a-4286-b978-bba8a913bba2/cfae9bf8-012a-4286-b978-bba8a913bba2.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1375.033397] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca927e65-1e19-48be-887d-971df4fda87e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.037608] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780997, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.042516] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1375.042516] env[62525]: value = "task-1780998" [ 1375.042516] env[62525]: _type = "Task" [ 1375.042516] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.050877] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1780998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.158035] env[62525]: DEBUG nova.network.neutron [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Instance cache missing network info. 
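Editor's note: the CopyVirtualDisk_Task above copies the cached image VMDK into the instance directory, and the "[datastore1] folder/file.vmdk" strings it operates on are plain datastore paths. A small illustrative helper that builds them in the same format follows; it is not Nova's ds_util, it only mirrors the strings seen in the log.

```python
# Illustrative helper for the "[datastore] folder/file.vmdk" paths used by
# the CopyVirtualDisk_Task above. Not Nova's ds_util; same string format only.
def datastore_path(datastore, *parts):
    return '[%s] %s' % (datastore, '/'.join(parts))

image_id = 'a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36'
instance_id = 'cfae9bf8-012a-4286-b978-bba8a913bba2'

source = datastore_path('datastore1', 'devstack-image-cache_base',
                        image_id, image_id + '.vmdk')
dest = datastore_path('datastore1', instance_id, instance_id + '.vmdk')
print(source)  # [datastore1] devstack-image-cache_base/a5a2.../a5a2....vmdk
print(dest)    # [datastore1] cfae9bf8-.../cfae9bf8-....vmdk
```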
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1375.312606] env[62525]: DEBUG nova.network.neutron [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updating instance_info_cache with network_info: [{"id": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "address": "fa:16:3e:38:65:3a", "network": {"id": "d1a3534b-661d-45a0-a8af-3eeea0cf4cab", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1268159643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071a34f637574e2ea213429903097ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5989b4-cf", "ovs_interfaceid": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.391732] env[62525]: DEBUG nova.compute.utils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1375.396237] env[62525]: DEBUG nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1375.396423] env[62525]: DEBUG nova.network.neutron [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1375.442062] env[62525]: DEBUG nova.policy [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac8ec45eacb84ddeb076b04b3b36a901', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b47806d16a8d4156922d49d6c23deb7a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1375.515725] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780996, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.534333] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780997, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.552923] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1780998, 'name': CopyVirtualDisk_Task} progress is 77%. 
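Editor's note: the "Policy check for network:attach_external_network failed with credentials {...}" record above is a policy decision made against the request's credential dict. The sketch below shows the same kind of check with plain oslo.policy rather than Nova's policy wrapper; the 'role:admin' rule default is an assumption for illustration, not Nova's actual policy, and the project_id is copied from the log.

```python
# Minimal oslo.policy sketch of the kind of check behind the "Policy check
# ... failed" line above. The rule default 'role:admin' is illustrative only.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_defaults([
    policy.RuleDefault('network:attach_external_network', 'role:admin'),
])

creds = {'roles': ['member', 'reader'],
         'project_id': 'b47806d16a8d4156922d49d6c23deb7a'}
allowed = enforcer.enforce('network:attach_external_network', {}, creds)
print(allowed)  # False: member/reader roles do not satisfy role:admin
```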
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.756358] env[62525]: DEBUG nova.network.neutron [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Successfully created port: 28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.818179] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Releasing lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.818179] env[62525]: DEBUG nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Instance network_info: |[{"id": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "address": "fa:16:3e:38:65:3a", "network": {"id": "d1a3534b-661d-45a0-a8af-3eeea0cf4cab", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1268159643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071a34f637574e2ea213429903097ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5989b4-cf", "ovs_interfaceid": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1375.818390] env[62525]: DEBUG oslo_concurrency.lockutils [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] Acquired lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.818390] env[62525]: DEBUG nova.network.neutron [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Refreshing network info cache for port 7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1375.818390] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:38:65:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cb94a1a-f287-46e7-b63b-ec692c2141b4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f5989b4-cf62-411c-9e0e-1bcbb8f37713', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1375.825841] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Creating folder: Project (071a34f637574e2ea213429903097ee7). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1375.827023] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c2a80e9-67a8-4807-a617-da1608b1b751 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.841026] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Created folder: Project (071a34f637574e2ea213429903097ee7) in parent group-v369553. [ 1375.841026] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Creating folder: Instances. Parent ref: group-v369645. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1375.841026] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e31b8e0-4950-46fc-9b45-83817915df70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.850584] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Created folder: Instances in parent group-v369645. [ 1375.851378] env[62525]: DEBUG oslo.service.loopingcall [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.851709] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1375.852077] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8c33b8b-b62f-40ae-a780-484f40a8ac16 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.875676] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1375.875676] env[62525]: value = "task-1781001" [ 1375.875676] env[62525]: _type = "Task" [ 1375.875676] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.882606] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781001, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.902026] env[62525]: DEBUG nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1375.903250] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43bf8b3a-ee8d-44e6-8893-28aa43db56e6 tempest-ServersAdmin275Test-1428403147 tempest-ServersAdmin275Test-1428403147-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.904842] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.790s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.905213] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.909404] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.935s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.913016] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.913016] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.906s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.913324] env[62525]: INFO nova.compute.claims [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1376.017075] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780996, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.023069] env[62525]: INFO nova.scheduler.client.report [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Deleted allocations for instance b6bdc187-a266-4f7d-a9e4-85cb100cf4bf [ 1376.026755] env[62525]: INFO nova.scheduler.client.report [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleted allocations for instance 8b41bff7-137f-489c-bb88-7487eb8e97cb [ 1376.041238] env[62525]: DEBUG oslo_vmware.api [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1780997, 'name': RemoveSnapshot_Task, 'duration_secs': 0.963136} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.041501] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1376.041742] env[62525]: INFO nova.compute.manager [None req-5c5282ee-e6f2-4de0-b63d-85eca9d10fec tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Took 17.43 seconds to snapshot the instance on the hypervisor. [ 1376.052952] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1780998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551843} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.053225] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] cfae9bf8-012a-4286-b978-bba8a913bba2/cfae9bf8-012a-4286-b978-bba8a913bba2.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1376.053446] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1376.053943] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41eb1a7e-7811-4f93-af73-55dc84aa9a9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.061782] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1376.061782] env[62525]: value = "task-1781002" [ 1376.061782] env[62525]: _type = "Task" [ 1376.061782] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.069504] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.384753] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781001, 'name': CreateVM_Task, 'duration_secs': 0.45254} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.385034] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1376.385800] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.385962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.386292] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.386541] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18713c3d-1f93-4746-aeeb-a0cecc2024f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.392665] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1376.392665] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528d579b-0633-bf1a-d701-06e0aafb351f" [ 1376.392665] env[62525]: _type = "Task" [ 1376.392665] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.403237] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528d579b-0633-bf1a-d701-06e0aafb351f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.406789] env[62525]: INFO nova.virt.block_device [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Booting with volume ea89af57-65c0-4ea1-9faf-5585daa59ceb at /dev/sda [ 1376.480642] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9c033bb-a5e6-44c0-b5a2-2020c508f338 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.489409] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78f8060-63a2-4632-b9f7-233af9ae8945 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.515807] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1780996, 'name': CloneVM_Task, 'duration_secs': 1.511628} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.527476] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Created linked-clone VM from snapshot [ 1376.528466] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292c0a68-fa28-4054-889a-e3bf3d7b38bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.530906] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8439e8d-a77d-488a-944e-bf49de5079a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.541228] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Uploading image a067b585-2bce-4333-8b79-129988ed9a15 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1376.543041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-49a4c8af-62e0-472d-b6db-b208a30075ee tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.770s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.546838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8edc10cb-f8cb-4dba-9897-059120c2c77c tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "8b41bff7-137f-489c-bb88-7487eb8e97cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.405s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.548709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 28.472s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.548709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.548709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.548709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.554419] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bf84fb-58c5-48d8-a4fd-a8882ace9a51 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.569042] env[62525]: INFO nova.compute.manager [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Terminating instance [ 1376.569042] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1376.569442] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5fe987df-6b51-4bd0-80a8-39dae64227a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.571581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.571735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquired lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.571891] env[62525]: DEBUG nova.network.neutron [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.586381] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069263} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.586381] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1376.586381] env[62525]: value = "task-1781003" [ 1376.586381] env[62525]: _type = "Task" [ 1376.586381] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.598011] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1376.599398] env[62525]: DEBUG nova.network.neutron [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updated VIF entry in instance network info cache for port 7f5989b4-cf62-411c-9e0e-1bcbb8f37713. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1376.599721] env[62525]: DEBUG nova.network.neutron [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updating instance_info_cache with network_info: [{"id": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "address": "fa:16:3e:38:65:3a", "network": {"id": "d1a3534b-661d-45a0-a8af-3eeea0cf4cab", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1268159643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071a34f637574e2ea213429903097ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5989b4-cf", "ovs_interfaceid": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.603850] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456d2240-15b6-4745-90a7-634b9561862a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.606970] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182e4d2b-ba8a-4c4a-8290-00d3e9c239ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.614652] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781003, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.635600] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] cfae9bf8-012a-4286-b978-bba8a913bba2/cfae9bf8-012a-4286-b978-bba8a913bba2.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1376.636235] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02b2b8df-8546-42ed-9789-9ca356db6c0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.650609] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae54b6dd-3993-4c1e-8771-deb3c11fb8c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.658668] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1376.658668] env[62525]: value = "task-1781004" [ 1376.658668] env[62525]: _type = "Task" [ 1376.658668] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.666395] env[62525]: DEBUG nova.virt.block_device [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Updating existing volume attachment record: 1ae4ee4f-37b9-4909-b4cb-45c86e50526b {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1376.672858] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781004, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.903687] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528d579b-0633-bf1a-d701-06e0aafb351f, 'name': SearchDatastore_Task, 'duration_secs': 0.010337} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.904019] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.904277] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1376.904513] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.904653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.904828] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.905525] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fe1a863-a0bf-4fcd-942e-e29e1926dd08 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.913061] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.913244] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1376.918594] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a6c8d93-4d15-4974-b3ed-4c6518f6dd7a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.919254] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1376.919254] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52407b2a-bbee-ed09-267e-f70f2fc31696" [ 1376.919254] env[62525]: _type = "Task" [ 1376.919254] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.926963] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52407b2a-bbee-ed09-267e-f70f2fc31696, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.077312] env[62525]: DEBUG nova.compute.utils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Can not refresh info_cache because instance was not found {{(pid=62525) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1377.105461] env[62525]: DEBUG nova.network.neutron [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.113227] env[62525]: DEBUG oslo_concurrency.lockutils [req-a834d275-6d0a-4890-98f2-0177d7700ccf req-ae125d8c-6e32-468c-bc2d-c024346e592a service nova] Releasing lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.113784] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781003, 'name': Destroy_Task, 'duration_secs': 0.456603} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.114031] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Destroyed the VM [ 1377.114350] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1377.114513] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a934af36-7077-4fd0-a64a-472aa7538885 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.121639] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1377.121639] env[62525]: value = "task-1781005" [ 1377.121639] env[62525]: _type = "Task" [ 1377.121639] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.130619] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781005, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.169014] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781004, 'name': ReconfigVM_Task, 'duration_secs': 0.354284} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.171971] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Reconfigured VM instance instance-0000001e to attach disk [datastore1] cfae9bf8-012a-4286-b978-bba8a913bba2/cfae9bf8-012a-4286-b978-bba8a913bba2.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.173873] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a0b5383-8d2e-41ce-87e6-a098e2755016 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.178892] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1377.178892] env[62525]: value = "task-1781006" [ 1377.178892] env[62525]: _type = "Task" [ 1377.178892] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.190391] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781006, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.249734] env[62525]: DEBUG nova.network.neutron [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.430039] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52407b2a-bbee-ed09-267e-f70f2fc31696, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.433693] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-439aff04-1432-4e18-a772-4b217e141f54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.438569] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1377.438569] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5259bc2a-6e2b-0237-ca8e-462fdea44288" [ 1377.438569] env[62525]: _type = "Task" [ 1377.438569] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.452696] env[62525]: DEBUG nova.network.neutron [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Successfully updated port: 28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1377.454097] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5259bc2a-6e2b-0237-ca8e-462fdea44288, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.460907] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.461221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.461331] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.461576] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.461662] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.464576] env[62525]: INFO nova.compute.manager [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Terminating instance [ 1377.466457] env[62525]: DEBUG nova.compute.manager [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1377.466631] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.467481] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc76bd31-39ba-4424-b037-393c4c9168f8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.471423] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5c9b40-ae1d-435f-b54d-32b8788aa841 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.479982] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.480814] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f25868e-0b30-49a3-a0b3-56666ace8490 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.482953] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9566ad-6a6a-475b-b988-8702e1bacf7f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.518549] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e22f0a-8876-4541-98d6-d8b11492db60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.521268] env[62525]: DEBUG oslo_vmware.api [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1377.521268] env[62525]: value = "task-1781007" [ 1377.521268] env[62525]: _type = "Task" [ 1377.521268] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.528090] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df13965-602c-4e64-8255-17983ae82465 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.534479] env[62525]: DEBUG oslo_vmware.api [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1781007, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.544623] env[62525]: DEBUG nova.compute.provider_tree [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.614255] env[62525]: DEBUG nova.compute.manager [req-e12507f0-7ef0-4234-8742-384865c1fb9b req-03229f72-4030-478d-9200-abb09d353eb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Received event network-vif-plugged-28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1377.614255] env[62525]: DEBUG oslo_concurrency.lockutils [req-e12507f0-7ef0-4234-8742-384865c1fb9b req-03229f72-4030-478d-9200-abb09d353eb0 service nova] Acquiring lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.614255] env[62525]: DEBUG oslo_concurrency.lockutils [req-e12507f0-7ef0-4234-8742-384865c1fb9b req-03229f72-4030-478d-9200-abb09d353eb0 service nova] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.614255] env[62525]: DEBUG oslo_concurrency.lockutils [req-e12507f0-7ef0-4234-8742-384865c1fb9b req-03229f72-4030-478d-9200-abb09d353eb0 service nova] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.614255] env[62525]: DEBUG nova.compute.manager [req-e12507f0-7ef0-4234-8742-384865c1fb9b req-03229f72-4030-478d-9200-abb09d353eb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] No waiting events found dispatching network-vif-plugged-28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1377.614458] env[62525]: WARNING nova.compute.manager [req-e12507f0-7ef0-4234-8742-384865c1fb9b req-03229f72-4030-478d-9200-abb09d353eb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Received unexpected event network-vif-plugged-28c8b4db-8c62-4e51-a573-d0e05371bbd6 for instance with vm_state building and task_state block_device_mapping. [ 1377.632777] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781005, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.656598] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "7f8392fa-1c11-4180-bda9-057b5cfa058c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.656886] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "7f8392fa-1c11-4180-bda9-057b5cfa058c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.657098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "7f8392fa-1c11-4180-bda9-057b5cfa058c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.657323] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "7f8392fa-1c11-4180-bda9-057b5cfa058c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.657492] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "7f8392fa-1c11-4180-bda9-057b5cfa058c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.660037] env[62525]: INFO nova.compute.manager [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Terminating instance [ 1377.661287] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "refresh_cache-7f8392fa-1c11-4180-bda9-057b5cfa058c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.661442] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquired lock "refresh_cache-7f8392fa-1c11-4180-bda9-057b5cfa058c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.661604] env[62525]: DEBUG nova.network.neutron [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 
tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1377.691766] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781006, 'name': Rename_Task, 'duration_secs': 0.14709} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.691981] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.692245] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53758811-a6d0-411f-a727-8cb04fa0540b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.699454] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1377.699454] env[62525]: value = "task-1781008" [ 1377.699454] env[62525]: _type = "Task" [ 1377.699454] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.708732] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781008, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.752196] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Releasing lock "refresh_cache-b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.752610] env[62525]: DEBUG nova.compute.manager [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1377.752819] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.753178] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-445e0fd5-f053-4837-beb3-f01f1e9a19a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.762174] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6b0959-2849-423d-a41d-fd34763bc4a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.795815] env[62525]: WARNING nova.virt.vmwareapi.vmops [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6bdc187-a266-4f7d-a9e4-85cb100cf4bf could not be found. [ 1377.796165] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1377.796420] env[62525]: INFO nova.compute.manager [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1377.796597] env[62525]: DEBUG oslo.service.loopingcall [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1377.799293] env[62525]: DEBUG nova.compute.manager [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1377.799293] env[62525]: DEBUG nova.network.neutron [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1377.820021] env[62525]: DEBUG nova.network.neutron [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.950575] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5259bc2a-6e2b-0237-ca8e-462fdea44288, 'name': SearchDatastore_Task, 'duration_secs': 0.009681} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.950785] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.951048] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e3f3fc2c-0060-4521-8aa3-da37209aee81/e3f3fc2c-0060-4521-8aa3-da37209aee81.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1377.951322] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07eb649a-e0f4-4c69-9f1f-2c555e898f4e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.958632] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Acquiring lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.958632] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Acquired lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.958632] env[62525]: DEBUG nova.network.neutron [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1377.960246] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1377.960246] env[62525]: value = "task-1781009" [ 1377.960246] env[62525]: _type = "Task" [ 1377.960246] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.969070] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.032045] env[62525]: DEBUG oslo_vmware.api [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1781007, 'name': PowerOffVM_Task, 'duration_secs': 0.231814} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.032374] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1378.032643] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1378.032978] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e6d1093-923e-4b8d-8761-e272c4bb3768 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.047454] env[62525]: DEBUG nova.scheduler.client.report [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1378.133800] env[62525]: DEBUG oslo_vmware.api [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781005, 'name': RemoveSnapshot_Task, 'duration_secs': 0.594455} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.134388] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1378.192486] env[62525]: DEBUG nova.network.neutron [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1378.211659] env[62525]: DEBUG oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781008, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.225738] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1378.226012] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1378.226301] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Deleting the datastore file [datastore1] 35a2e221-e1c5-49d9-af93-5e5f28c62b8f {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1378.226653] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb20cdb3-b521-4431-95db-06678a4b3a5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.236311] env[62525]: DEBUG oslo_vmware.api [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for the task: (returnval){ [ 1378.236311] env[62525]: value = "task-1781011" [ 1378.236311] env[62525]: _type = "Task" [ 1378.236311] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.246716] env[62525]: DEBUG oslo_vmware.api [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1781011, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.289038] env[62525]: DEBUG nova.network.neutron [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.322684] env[62525]: DEBUG nova.network.neutron [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.395136] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.395505] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.395737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.395932] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.396119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.398493] env[62525]: INFO nova.compute.manager [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Terminating instance [ 1378.401028] env[62525]: DEBUG nova.compute.manager [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f 
tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1378.401222] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1378.402111] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb85c4c-b06c-4060-9570-cbfbc6b0c5cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.410503] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1378.410791] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8363d40-d0ea-4f10-9961-a775ebc1e601 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.420385] env[62525]: DEBUG oslo_vmware.api [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1378.420385] env[62525]: value = "task-1781012" [ 1378.420385] env[62525]: _type = "Task" [ 1378.420385] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.429504] env[62525]: DEBUG oslo_vmware.api [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1781012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.472889] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781009, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.496122] env[62525]: DEBUG nova.network.neutron [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1378.553141] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.553757] env[62525]: DEBUG nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1378.556833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.617s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.557088] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.561030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.690s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.561030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.562777] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.277s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.562975] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.565243] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.018s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.567339] env[62525]: INFO nova.compute.claims [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1378.626029] env[62525]: INFO nova.scheduler.client.report [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted allocations for instance e3255df2-2de0-4668-ad7b-a864ea680b44 [ 1378.627927] env[62525]: INFO nova.scheduler.client.report [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted allocations for instance 1277dac8-3a23-4de8-93c7-c967b0eaf6ba [ 1378.644627] env[62525]: WARNING nova.compute.manager [None req-444bbe43-cafb-4f79-8e2f-3523ce8565d3 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Image not found during snapshot: nova.exception.ImageNotFound: Image a067b585-2bce-4333-8b79-129988ed9a15 could not be found. [ 1378.647924] env[62525]: INFO nova.scheduler.client.report [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleted allocations for instance 82ea280a-4e1b-4fac-a634-7f79ce731564 [ 1378.664934] env[62525]: DEBUG nova.network.neutron [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Updating instance_info_cache with network_info: [{"id": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "address": "fa:16:3e:19:b8:51", "network": {"id": "3a2a43e0-d31e-47c0-9712-ccff4732a063", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-50961704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b47806d16a8d4156922d49d6c23deb7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c8b4db-8c", "ovs_interfaceid": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.709948] env[62525]: DEBUG 
oslo_vmware.api [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781008, 'name': PowerOnVM_Task, 'duration_secs': 0.556264} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.710250] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1378.710448] env[62525]: INFO nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Took 8.25 seconds to spawn the instance on the hypervisor. [ 1378.710623] env[62525]: DEBUG nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1378.711961] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b20fb8-c3b3-468c-a3c4-b25bb1aa8400 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.721142] env[62525]: WARNING oslo_messaging._drivers.amqpdriver [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1378.748855] env[62525]: DEBUG oslo_vmware.api [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Task: {'id': task-1781011, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354036} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.749118] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.749303] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1378.749478] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1378.749642] env[62525]: INFO nova.compute.manager [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1378.749882] env[62525]: DEBUG oslo.service.loopingcall [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.750078] env[62525]: DEBUG nova.compute.manager [-] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1378.750174] env[62525]: DEBUG nova.network.neutron [-] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1378.791710] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Releasing lock "refresh_cache-7f8392fa-1c11-4180-bda9-057b5cfa058c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.792193] env[62525]: DEBUG nova.compute.manager [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1378.792397] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1378.793267] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cffe9fb-c1ec-4cdb-bc91-fcf76f8400d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.801958] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1378.802218] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c001dc97-3fee-44d4-982d-19d4344a4128 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.805012] env[62525]: DEBUG nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1378.805517] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1378.805727] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1378.805880] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1378.806067] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1378.806330] env[62525]: DEBUG 
nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1378.806507] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1378.806774] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1378.807329] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1378.807421] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1378.808841] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1378.808841] env[62525]: DEBUG nova.virt.hardware [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1378.809908] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb29c2c4-ede6-43d3-b0a0-7011081d7f58 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.812652] env[62525]: DEBUG oslo_vmware.api [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1378.812652] env[62525]: value = "task-1781013" [ 1378.812652] env[62525]: _type = "Task" [ 1378.812652] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.822866] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a0ae1d-5695-4de6-bc64-c6b757d627d5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.832828] env[62525]: INFO nova.compute.manager [-] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Took 1.03 seconds to deallocate network for instance. [ 1378.833169] env[62525]: DEBUG oslo_vmware.api [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1781013, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.930520] env[62525]: DEBUG oslo_vmware.api [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1781012, 'name': PowerOffVM_Task, 'duration_secs': 0.246861} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.930811] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1378.931026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1378.931275] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddd32d13-43f7-417c-b250-216fd1278154 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.972231] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516718} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.972231] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e3f3fc2c-0060-4521-8aa3-da37209aee81/e3f3fc2c-0060-4521-8aa3-da37209aee81.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1378.972402] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1378.972540] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e017ca6a-a920-432c-a537-d683065bc1e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.979066] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1378.979066] env[62525]: value = "task-1781015" [ 1378.979066] env[62525]: _type = "Task" [ 1378.979066] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.987618] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781015, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.075943] env[62525]: DEBUG nova.compute.utils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1379.077951] env[62525]: DEBUG nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1379.077951] env[62525]: DEBUG nova.network.neutron [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1379.133745] env[62525]: DEBUG nova.policy [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b2a3e9006c44ebabc5a73be540b9045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4685480cae574a5daac6a1f077a8c319', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1379.140851] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3502c618-00ec-47b9-aa63-2949d839bb66 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "e3255df2-2de0-4668-ad7b-a864ea680b44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.901s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.142262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8621eaaa-19f8-4c63-a02b-8c338ddb61be tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1277dac8-3a23-4de8-93c7-c967b0eaf6ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.758s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.156034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c876882f-c9f5-47d7-88d8-b9dbe65325c9 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "82ea280a-4e1b-4fac-a634-7f79ce731564" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.983s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.168038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Releasing lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.168038] env[62525]: DEBUG nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Instance network_info: |[{"id": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "address": "fa:16:3e:19:b8:51", "network": {"id": "3a2a43e0-d31e-47c0-9712-ccff4732a063", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-50961704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b47806d16a8d4156922d49d6c23deb7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c8b4db-8c", "ovs_interfaceid": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1379.168706] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:b8:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96fdbb91-eb49-4dbf-b234-5b38503d7589', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28c8b4db-8c62-4e51-a573-d0e05371bbd6', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1379.177332] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Creating folder: Project (b47806d16a8d4156922d49d6c23deb7a). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1379.179219] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7329144f-23a5-4c15-8066-bc603c9a418c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.197129] env[62525]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1379.197417] env[62525]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62525) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1379.197992] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Folder already exists: Project (b47806d16a8d4156922d49d6c23deb7a). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1379.198356] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Creating folder: Instances. Parent ref: group-v369583. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1379.199854] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c58bfe3-70e7-4e0c-8ace-1bd438be9107 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.212204] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Created folder: Instances in parent group-v369583. [ 1379.213591] env[62525]: DEBUG oslo.service.loopingcall [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.213591] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1379.213591] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7bd12831-d5ce-484e-beeb-2164745852cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.252172] env[62525]: INFO nova.compute.manager [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Took 44.01 seconds to build instance. [ 1379.257059] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1379.257059] env[62525]: value = "task-1781018" [ 1379.257059] env[62525]: _type = "Task" [ 1379.257059] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.273435] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781018, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.275640] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1379.275998] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1379.276326] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Deleting the datastore file [datastore1] 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1379.276990] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d413a6c0-5d17-4585-ae7e-e457e2acb8c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.286447] env[62525]: DEBUG oslo_vmware.api [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for the task: (returnval){ [ 1379.286447] env[62525]: value = "task-1781019" [ 1379.286447] env[62525]: _type = "Task" [ 1379.286447] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.296493] env[62525]: DEBUG oslo_vmware.api [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1781019, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.324234] env[62525]: DEBUG oslo_vmware.api [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1781013, 'name': PowerOffVM_Task, 'duration_secs': 0.196455} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.324518] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1379.324705] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1379.324979] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d1f881e-70a3-4170-aa05-e1175bef48a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.347718] env[62525]: INFO nova.compute.manager [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance disappeared during terminate [ 1379.347718] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b792437c-2b4b-42af-9349-f4b56dfa3ff8 tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "b6bdc187-a266-4f7d-a9e4-85cb100cf4bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.800s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.349902] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1379.350129] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1379.350318] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleting the datastore file [datastore1] 7f8392fa-1c11-4180-bda9-057b5cfa058c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1379.350824] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17bcc92e-36a0-491c-a686-e8e88ee9bf48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.358245] env[62525]: DEBUG oslo_vmware.api [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for the task: (returnval){ [ 1379.358245] env[62525]: value = "task-1781021" [ 1379.358245] env[62525]: _type = "Task" [ 1379.358245] env[62525]: } to 
complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.367458] env[62525]: DEBUG oslo_vmware.api [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1781021, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.444144] env[62525]: DEBUG nova.network.neutron [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Successfully created port: fd035d09-ef89-4a5c-ac55-b849f0b4cc48 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.522024] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781015, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067891} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.522024] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1379.522564] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78095e44-0fa9-4542-8ccc-93cd995a6ae1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.581740] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] e3f3fc2c-0060-4521-8aa3-da37209aee81/e3f3fc2c-0060-4521-8aa3-da37209aee81.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1379.586325] env[62525]: DEBUG nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1379.590097] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ba75afa-7e95-4ea3-aa54-17617f7a4747 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.617854] env[62525]: DEBUG nova.network.neutron [-] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.619259] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1379.619259] env[62525]: value = "task-1781022" [ 1379.619259] env[62525]: _type = "Task" [ 1379.619259] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.630162] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781022, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.685459] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.685459] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.685605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.685819] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.686022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 
tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.691164] env[62525]: INFO nova.compute.manager [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Terminating instance [ 1379.692786] env[62525]: DEBUG nova.compute.manager [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1379.693076] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1379.693844] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e61d111-7a39-4c48-8e57-d6dfe2eb508f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.702134] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1379.702453] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-757e5446-5533-4187-82f4-09779fa5c685 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.712119] env[62525]: DEBUG oslo_vmware.api [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1379.712119] env[62525]: value = "task-1781023" [ 1379.712119] env[62525]: _type = "Task" [ 1379.712119] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.720070] env[62525]: DEBUG oslo_vmware.api [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781023, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.758098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-247cbcc7-c675-40a6-b497-95f70bb2da6f tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.455s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.768793] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781018, 'name': CreateVM_Task, 'duration_secs': 0.412078} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.771958] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1379.773692] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369595', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'name': 'volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'serial': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb'}, 'device_type': None, 'mount_device': '/dev/sda', 'attachment_id': '1ae4ee4f-37b9-4909-b4cb-45c86e50526b', 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62525) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1379.773890] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Root volume attach. Driver type: vmdk {{(pid=62525) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1379.775264] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1e9667-f8ff-46bb-8268-dbfe0cb6cefe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.787711] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee91e0e9-30e1-4424-98f7-0c9338542788 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.804437] env[62525]: DEBUG oslo_vmware.api [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Task: {'id': task-1781019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194016} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.805028] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1379.805272] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1379.805479] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1379.805658] env[62525]: INFO nova.compute.manager [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1379.805904] env[62525]: DEBUG oslo.service.loopingcall [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.806716] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf33079d-ca09-4b14-9917-a72d92d21f7d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.816060] env[62525]: DEBUG nova.compute.manager [-] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1379.816213] env[62525]: DEBUG nova.network.neutron [-] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1379.823574] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-32515004-b921-4e67-84fc-c5ddea2801ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.831105] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1379.831105] env[62525]: value = "task-1781024" [ 1379.831105] env[62525]: _type = "Task" [ 1379.831105] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.842564] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781024, 'name': RelocateVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.853014] env[62525]: DEBUG nova.compute.manager [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Received event network-changed-28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1379.853232] env[62525]: DEBUG nova.compute.manager [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Refreshing instance network info cache due to event network-changed-28c8b4db-8c62-4e51-a573-d0e05371bbd6. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1379.853439] env[62525]: DEBUG oslo_concurrency.lockutils [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] Acquiring lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.853584] env[62525]: DEBUG oslo_concurrency.lockutils [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] Acquired lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.853737] env[62525]: DEBUG nova.network.neutron [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Refreshing network info cache for port 28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1379.869645] env[62525]: DEBUG oslo_vmware.api [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Task: {'id': task-1781021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140675} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.870752] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1379.870918] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1379.871210] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1379.871463] env[62525]: INFO nova.compute.manager [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1379.871757] env[62525]: DEBUG oslo.service.loopingcall [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.872317] env[62525]: DEBUG nova.compute.manager [-] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1379.872421] env[62525]: DEBUG nova.network.neutron [-] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1379.894769] env[62525]: DEBUG nova.network.neutron [-] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1380.125276] env[62525]: INFO nova.compute.manager [-] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Took 1.37 seconds to deallocate network for instance. [ 1380.136698] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781022, 'name': ReconfigVM_Task, 'duration_secs': 0.378672} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.137904] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Reconfigured VM instance instance-0000001f to attach disk [datastore1] e3f3fc2c-0060-4521-8aa3-da37209aee81/e3f3fc2c-0060-4521-8aa3-da37209aee81.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1380.137904] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a40fab80-f9a3-4774-962b-33d9b35d38cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.145198] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1380.145198] env[62525]: value = "task-1781025" [ 1380.145198] env[62525]: _type = "Task" [ 1380.145198] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.159384] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781025, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.194768] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2878515-1a79-48bb-b612-f7615f8507c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.203257] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae379a60-78e6-4173-b73a-cbcad7d548a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.243464] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73a7536-9693-46f8-bb70-9def2653ac2d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.249478] env[62525]: DEBUG oslo_vmware.api [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781023, 'name': PowerOffVM_Task, 'duration_secs': 0.222276} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.251580] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1380.251580] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1380.252591] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22c03961-cb6c-402d-9e84-921c734f67b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.254452] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c362a4f-37a5-4780-bd1d-4444cd2943ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.268399] env[62525]: DEBUG nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1380.272707] env[62525]: DEBUG nova.compute.provider_tree [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1380.319021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "9dfb7d7f-6656-46fd-969e-c692db1ce507" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.319439] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "9dfb7d7f-6656-46fd-969e-c692db1ce507" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.319819] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "9dfb7d7f-6656-46fd-969e-c692db1ce507-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.319967] env[62525]: 
DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "9dfb7d7f-6656-46fd-969e-c692db1ce507-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.320173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "9dfb7d7f-6656-46fd-969e-c692db1ce507-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.323288] env[62525]: INFO nova.compute.manager [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Terminating instance [ 1380.325574] env[62525]: DEBUG nova.compute.manager [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1380.325817] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1380.326694] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c18b3c-6365-4177-9a4d-c00193add447 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.330635] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1380.330824] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1380.331027] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleting the datastore file [datastore1] 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.331889] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-b45abefe-01db-4335-b498-2ba1622f7195 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.335744] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.338989] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2e5305d-7e43-43b0-9a08-1e79ea1e93ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.341220] env[62525]: DEBUG oslo_vmware.api [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1380.341220] env[62525]: value = "task-1781027" [ 1380.341220] env[62525]: _type = "Task" [ 1380.341220] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.348817] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781024, 'name': RelocateVM_Task, 'duration_secs': 0.422557} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.349366] env[62525]: DEBUG oslo_vmware.api [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1380.349366] env[62525]: value = "task-1781028" [ 1380.349366] env[62525]: _type = "Task" [ 1380.349366] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.349953] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1380.350557] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369595', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'name': 'volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'serial': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1380.351075] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e57730-58e6-4c31-ae77-1f01f4fb553e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.361346] env[62525]: DEBUG oslo_vmware.api [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.367446] env[62525]: DEBUG oslo_vmware.api [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781028, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.379645] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a20a94-8375-4f34-865d-f751a8fb4667 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.403836] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb/volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1380.406271] env[62525]: DEBUG nova.network.neutron [-] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.407383] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12601bbc-3cce-4ff9-99f2-6c82f5026225 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.427999] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1380.427999] env[62525]: value = "task-1781029" [ 1380.427999] env[62525]: _type = "Task" [ 1380.427999] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.438071] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.595844] env[62525]: DEBUG nova.network.neutron [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Updated VIF entry in instance network info cache for port 28c8b4db-8c62-4e51-a573-d0e05371bbd6. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1380.596206] env[62525]: DEBUG nova.network.neutron [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Updating instance_info_cache with network_info: [{"id": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "address": "fa:16:3e:19:b8:51", "network": {"id": "3a2a43e0-d31e-47c0-9712-ccff4732a063", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-50961704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b47806d16a8d4156922d49d6c23deb7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c8b4db-8c", "ovs_interfaceid": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.618449] env[62525]: DEBUG nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1380.626099] env[62525]: DEBUG nova.network.neutron [-] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.637731] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.639920] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1380.640159] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1380.640316] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1380.640494] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1380.640636] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1380.640779] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1380.640976] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 
tempest-ServersAdminTestJSON-245146123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1380.641176] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1380.641344] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1380.641504] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1380.641671] env[62525]: DEBUG nova.virt.hardware [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1380.643129] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abdc711-25f1-4981-a5b7-9a28c257b4f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.654395] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781025, 'name': Rename_Task, 'duration_secs': 0.154506} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.656586] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1380.656844] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abfffc8d-2107-491b-8f65-f388a706be0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.659241] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049af1fd-2a1f-4d76-92a7-0393db613588 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.675112] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1380.675112] env[62525]: value = "task-1781030" [ 1380.675112] env[62525]: _type = "Task" [ 1380.675112] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.683296] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781030, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.776413] env[62525]: DEBUG nova.scheduler.client.report [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1380.801485] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.853036] env[62525]: DEBUG oslo_vmware.api [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213008} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.856923] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1380.857281] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1380.857402] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1380.857589] env[62525]: INFO nova.compute.manager [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1380.857829] env[62525]: DEBUG oslo.service.loopingcall [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.858064] env[62525]: DEBUG nova.compute.manager [-] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1380.858162] env[62525]: DEBUG nova.network.neutron [-] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1380.866061] env[62525]: DEBUG oslo_vmware.api [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781028, 'name': PowerOffVM_Task, 'duration_secs': 0.212589} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.866061] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1380.866216] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1380.866412] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b7e43c0-0058-4ebf-9f55-94971ffc4a9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.923950] env[62525]: INFO nova.compute.manager [-] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Took 1.05 seconds to deallocate network for instance. [ 1380.942742] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781029, 'name': ReconfigVM_Task, 'duration_secs': 0.281801} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.943200] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Reconfigured VM instance instance-00000020 to attach disk [datastore1] volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb/volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1380.951574] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf6ab7f1-38d6-4cb7-9d92-e8e5191e36ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.965278] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1380.965945] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1380.965945] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleting the datastore file [datastore1] 
9dfb7d7f-6656-46fd-969e-c692db1ce507 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.966746] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ceab566f-e320-4a5e-a4b0-b87df57fc767 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.971337] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1380.971337] env[62525]: value = "task-1781032" [ 1380.971337] env[62525]: _type = "Task" [ 1380.971337] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.981299] env[62525]: DEBUG oslo_vmware.api [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1380.981299] env[62525]: value = "task-1781033" [ 1380.981299] env[62525]: _type = "Task" [ 1380.981299] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.990933] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781032, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.997327] env[62525]: DEBUG oslo_vmware.api [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781033, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.065568] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0df04a75-b48f-4c84-90fa-ae0712450adb tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "interface-cfae9bf8-012a-4286-b978-bba8a913bba2-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.065926] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0df04a75-b48f-4c84-90fa-ae0712450adb tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "interface-cfae9bf8-012a-4286-b978-bba8a913bba2-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.066309] env[62525]: DEBUG nova.objects.instance [None req-0df04a75-b48f-4c84-90fa-ae0712450adb tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lazy-loading 'flavor' on Instance uuid cfae9bf8-012a-4286-b978-bba8a913bba2 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1381.099030] env[62525]: DEBUG oslo_concurrency.lockutils [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] Releasing lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.099346] env[62525]: DEBUG nova.compute.manager [req-633734c4-163d-4342-942e-c6ab2d53b834 req-c72318f6-dd23-4e79-a0c0-a2bed98addb0 service nova] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Received event network-vif-deleted-3030b0dc-e404-441e-ab9d-2a05ca9d68e2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1381.128965] env[62525]: INFO nova.compute.manager [-] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Took 1.31 seconds to deallocate network for instance. [ 1381.187398] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781030, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.220375] env[62525]: DEBUG nova.network.neutron [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Successfully updated port: fd035d09-ef89-4a5c-ac55-b849f0b4cc48 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.287245] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.289194] env[62525]: DEBUG nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1381.290814] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.255s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.290874] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.293951] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.777s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.294172] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.296234] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.094s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.301053] env[62525]: INFO nova.compute.claims [None 
req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1381.403497] env[62525]: INFO nova.scheduler.client.report [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Deleted allocations for instance f4cf1732-6b6a-47be-acf4-b127bc4b9baf [ 1381.422083] env[62525]: INFO nova.scheduler.client.report [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Deleted allocations for instance d38bbd59-b40c-4965-b823-caefc93e2568 [ 1381.434232] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.481963] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781032, 'name': ReconfigVM_Task, 'duration_secs': 0.136613} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.482537] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369595', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'name': 'volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'serial': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1381.487217] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a597e41f-5ae5-4fd0-b1b4-02e8246a0fed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.493622] env[62525]: DEBUG oslo_vmware.api [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199104} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.495321] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1381.495527] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1381.495747] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1381.495964] env[62525]: INFO nova.compute.manager [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1381.496234] env[62525]: DEBUG oslo.service.loopingcall [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.496513] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1381.496513] env[62525]: value = "task-1781034" [ 1381.496513] env[62525]: _type = "Task" [ 1381.496513] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.496931] env[62525]: DEBUG nova.compute.manager [-] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1381.497039] env[62525]: DEBUG nova.network.neutron [-] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1381.507113] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781034, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.570608] env[62525]: DEBUG nova.objects.instance [None req-0df04a75-b48f-4c84-90fa-ae0712450adb tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lazy-loading 'pci_requests' on Instance uuid cfae9bf8-012a-4286-b978-bba8a913bba2 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1381.613212] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.613506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.634883] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.644666] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "10f10329-9a7d-4e1b-8fb4-90350169e518" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.644893] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.687335] env[62525]: DEBUG oslo_vmware.api [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781030, 'name': PowerOnVM_Task, 'duration_secs': 0.57133} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.687634] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1381.687869] env[62525]: INFO nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Took 8.45 seconds to spawn the instance on the hypervisor. [ 1381.688098] env[62525]: DEBUG nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1381.688947] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95a599f-c155-44cf-8f3c-5e2ffa3b4ffc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.725454] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "refresh_cache-9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.725684] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "refresh_cache-9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.725905] env[62525]: DEBUG nova.network.neutron [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1381.805463] env[62525]: DEBUG nova.compute.utils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1381.811299] env[62525]: DEBUG nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1381.811299] env[62525]: DEBUG nova.network.neutron [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1381.857430] env[62525]: DEBUG nova.network.neutron [-] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.874028] env[62525]: DEBUG nova.policy [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78ec441ca09f4b9eb8491be1e5cf81e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bb3230ddac441d5b85601eb9e6b5da8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1381.915565] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fcb8b48b-6627-4421-8cca-16df8a9b383e tempest-ServersAdmin275Test-1121358306 tempest-ServersAdmin275Test-1121358306-project-member] Lock "f4cf1732-6b6a-47be-acf4-b127bc4b9baf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.737s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.929160] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8a869cb-501c-4671-8965-9c1bb02effc9 tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "d38bbd59-b40c-4965-b823-caefc93e2568" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.846s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.008090] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781034, 'name': Rename_Task, 'duration_secs': 0.136952} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.008407] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.008684] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a580f497-429e-4071-8704-b47417bedde4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.015105] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1382.015105] env[62525]: value = "task-1781035" [ 1382.015105] env[62525]: _type = "Task" [ 1382.015105] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.022649] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781035, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.072543] env[62525]: DEBUG nova.objects.base [None req-0df04a75-b48f-4c84-90fa-ae0712450adb tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1382.072760] env[62525]: DEBUG nova.network.neutron [None req-0df04a75-b48f-4c84-90fa-ae0712450adb tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1382.205604] env[62525]: INFO nova.compute.manager [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Took 45.23 seconds to build instance. 
[ 1382.210214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0df04a75-b48f-4c84-90fa-ae0712450adb tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "interface-cfae9bf8-012a-4286-b978-bba8a913bba2-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.144s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.212883] env[62525]: DEBUG nova.network.neutron [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Successfully created port: 0232b819-eeb0-4723-8935-35d0704f301a {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1382.276653] env[62525]: DEBUG nova.network.neutron [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.309606] env[62525]: DEBUG nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1382.334057] env[62525]: DEBUG nova.network.neutron [-] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.360575] env[62525]: INFO nova.compute.manager [-] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Took 1.50 seconds to deallocate network for instance. 
[ 1382.506234] env[62525]: DEBUG nova.network.neutron [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Updating instance_info_cache with network_info: [{"id": "fd035d09-ef89-4a5c-ac55-b849f0b4cc48", "address": "fa:16:3e:5f:9b:e6", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd035d09-ef", "ovs_interfaceid": "fd035d09-ef89-4a5c-ac55-b849f0b4cc48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.530945] env[62525]: DEBUG oslo_vmware.api [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781035, 'name': PowerOnVM_Task, 'duration_secs': 0.473539} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.531221] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1382.531425] env[62525]: INFO nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Took 3.73 seconds to spawn the instance on the hypervisor. 
[ 1382.531594] env[62525]: DEBUG nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1382.533023] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2a2f87-04a6-4651-8138-0300e3fca63c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.623066] env[62525]: DEBUG nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Received event network-vif-deleted-1790239c-c6c1-47bb-ac87-c96e5a2f2e8d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1382.623066] env[62525]: DEBUG nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Received event network-vif-plugged-fd035d09-ef89-4a5c-ac55-b849f0b4cc48 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1382.623066] env[62525]: DEBUG oslo_concurrency.lockutils [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] Acquiring lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.623562] env[62525]: DEBUG oslo_concurrency.lockutils [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.623562] env[62525]: DEBUG oslo_concurrency.lockutils [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.623632] env[62525]: DEBUG nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] No waiting events found dispatching network-vif-plugged-fd035d09-ef89-4a5c-ac55-b849f0b4cc48 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1382.623776] env[62525]: WARNING nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Received unexpected event network-vif-plugged-fd035d09-ef89-4a5c-ac55-b849f0b4cc48 for instance with vm_state building and task_state spawning. 
[ 1382.624656] env[62525]: DEBUG nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Received event network-changed-fd035d09-ef89-4a5c-ac55-b849f0b4cc48 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1382.624656] env[62525]: DEBUG nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Refreshing instance network info cache due to event network-changed-fd035d09-ef89-4a5c-ac55-b849f0b4cc48. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1382.624656] env[62525]: DEBUG oslo_concurrency.lockutils [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] Acquiring lock "refresh_cache-9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.707881] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2213441f-f4ec-4561-a2ac-cbd8790f00d1 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.066s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.837454] env[62525]: INFO nova.compute.manager [-] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Took 1.34 seconds to deallocate network for instance. [ 1382.852249] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d5e41c-3225-4b04-8757-6b8cfdefc032 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.861181] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed01a01c-e20a-4142-b8a9-afa6e17ee050 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.867007] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.892263] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecc256a-1f17-45a8-8d82-73364367d01e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.900600] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1e2d4a-f756-4919-903b-ec4e31daf63a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.914507] env[62525]: DEBUG nova.compute.provider_tree [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Updating inventory in ProviderTree for provider 
bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1383.008651] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "refresh_cache-9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.009029] env[62525]: DEBUG nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Instance network_info: |[{"id": "fd035d09-ef89-4a5c-ac55-b849f0b4cc48", "address": "fa:16:3e:5f:9b:e6", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd035d09-ef", "ovs_interfaceid": "fd035d09-ef89-4a5c-ac55-b849f0b4cc48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1383.009338] env[62525]: DEBUG oslo_concurrency.lockutils [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] Acquired lock "refresh_cache-9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.009513] env[62525]: DEBUG nova.network.neutron [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Refreshing network info cache for port fd035d09-ef89-4a5c-ac55-b849f0b4cc48 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.010746] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:9b:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'fd035d09-ef89-4a5c-ac55-b849f0b4cc48', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.018914] env[62525]: DEBUG oslo.service.loopingcall [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.019406] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.020160] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55fd9454-8ec1-4a1a-9382-eb448f941c46 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.046347] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.046347] env[62525]: value = "task-1781036" [ 1383.046347] env[62525]: _type = "Task" [ 1383.046347] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.059717] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781036, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.061167] env[62525]: INFO nova.compute.manager [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Took 39.50 seconds to build instance. [ 1383.210267] env[62525]: DEBUG nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1383.319159] env[62525]: DEBUG nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1383.343928] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.347097] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1383.347447] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1383.347643] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.347836] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1383.347982] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.348483] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1383.348869] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1383.349075] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1383.349253] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1383.349592] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1383.349984] env[62525]: DEBUG nova.virt.hardware [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1383.350840] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f3058e-0306-42e4-96b8-630ad0eccce9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.359099] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643f35a8-6a0e-44e7-a23c-c6b73f7c1478 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.437922] env[62525]: ERROR nova.scheduler.client.report [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [req-6b0a0393-3084-4c66-b849-d55c41dcf978] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6b0a0393-3084-4c66-b849-d55c41dcf978"}]} [ 1383.455083] env[62525]: DEBUG nova.scheduler.client.report [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1383.475085] env[62525]: DEBUG nova.scheduler.client.report [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1383.475345] env[62525]: DEBUG nova.compute.provider_tree [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1383.491358] env[62525]: DEBUG nova.scheduler.client.report [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1383.512877] env[62525]: DEBUG nova.scheduler.client.report [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1383.569406] env[62525]: DEBUG oslo_concurrency.lockutils [None req-09a41333-f045-4c32-b7af-cc432b81a4e0 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.829s 
{{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.569508] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781036, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.740214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.853713] env[62525]: DEBUG nova.network.neutron [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Updated VIF entry in instance network info cache for port fd035d09-ef89-4a5c-ac55-b849f0b4cc48. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1383.854113] env[62525]: DEBUG nova.network.neutron [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Updating instance_info_cache with network_info: [{"id": "fd035d09-ef89-4a5c-ac55-b849f0b4cc48", "address": "fa:16:3e:5f:9b:e6", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd035d09-ef", "ovs_interfaceid": "fd035d09-ef89-4a5c-ac55-b849f0b4cc48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.018549] env[62525]: DEBUG nova.network.neutron [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Successfully updated port: 0232b819-eeb0-4723-8935-35d0704f301a {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1384.073254] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781036, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.077110] env[62525]: DEBUG nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1384.191159] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a20fd7-18fc-4e85-ade1-ce9495dfbcf3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.199768] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34596ebe-69b1-40b5-996e-293d29d05e46 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.235908] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb345d6-ff4d-4bde-ae45-79643a37f729 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.244016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52353d7f-a6a8-4846-b501-7abf3f12c966 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.258831] env[62525]: DEBUG nova.compute.provider_tree [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1384.357361] env[62525]: DEBUG oslo_concurrency.lockutils [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] Releasing lock "refresh_cache-9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.357719] env[62525]: DEBUG nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Received event network-vif-deleted-42ec407b-c27a-4d4f-9c35-6c5a65f5db02 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1384.357916] env[62525]: DEBUG nova.compute.manager [req-7413757b-f7c4-496e-8e1c-90fe5df5c4ca req-d5997aa6-0378-404e-b00f-c8c875f12fb9 service nova] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Received event network-vif-deleted-ebd6c0fe-5181-41be-a80c-55b3b3d0841d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1384.468955] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.469350] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a 
tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.470475] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.470784] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.471417] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.474427] env[62525]: INFO nova.compute.manager [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Terminating instance [ 1384.476657] env[62525]: DEBUG nova.compute.manager [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1384.477251] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1384.478807] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9535acbc-a083-4dfd-8de6-0881f3e97dc4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.487765] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1384.488668] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9135d563-1440-40f1-81a6-2cebe977651e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.494883] env[62525]: DEBUG oslo_vmware.api [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1384.494883] env[62525]: value = "task-1781037" [ 1384.494883] env[62525]: _type = "Task" [ 1384.494883] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.502536] env[62525]: DEBUG oslo_vmware.api [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1781037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.520587] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "refresh_cache-f2240974-0fa4-4f59-ae0c-b9da52f9600e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.520907] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquired lock "refresh_cache-f2240974-0fa4-4f59-ae0c-b9da52f9600e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.521053] env[62525]: DEBUG nova.network.neutron [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1384.560469] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781036, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.598202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.768917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "cfae9bf8-012a-4286-b978-bba8a913bba2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.768917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.769459] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "cfae9bf8-012a-4286-b978-bba8a913bba2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.769459] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.770426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.774351] env[62525]: INFO nova.compute.manager [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Terminating instance [ 1384.776931] env[62525]: DEBUG nova.compute.manager [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1384.777624] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1384.778290] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c853fa-bb86-4947-85c9-cd6ace7e0097 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.789037] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1384.789037] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4facfc69-44cf-4528-9071-06cc1ac43c99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.798178] env[62525]: DEBUG oslo_vmware.api [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1384.798178] env[62525]: value = "task-1781038" [ 1384.798178] env[62525]: _type = "Task" [ 1384.798178] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.807951] env[62525]: DEBUG oslo_vmware.api [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781038, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.812850] env[62525]: DEBUG nova.scheduler.client.report [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 54 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1384.813166] env[62525]: DEBUG nova.compute.provider_tree [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 54 to 55 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1384.813372] env[62525]: DEBUG nova.compute.provider_tree [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1385.005577] env[62525]: DEBUG oslo_vmware.api [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1781037, 'name': PowerOffVM_Task, 'duration_secs': 0.336678} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.006103] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.006103] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.006381] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34235a3b-ace4-40db-9c44-568a6776ed59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.063076] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781036, 'name': CreateVM_Task, 'duration_secs': 1.694257} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.063293] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1385.063951] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.064126] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.064596] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1385.064959] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76b4ab07-a4af-42ec-99e3-3a5f8260a0c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.067714] env[62525]: DEBUG nova.network.neutron [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1385.073075] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1385.073075] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d5ef8a-b6b1-f22f-3744-fead30c49b64" [ 1385.073075] env[62525]: _type = "Task" [ 1385.073075] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.082478] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d5ef8a-b6b1-f22f-3744-fead30c49b64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.095524] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.095768] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.095967] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Deleting the datastore file [datastore1] 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.100024] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9247adab-0693-4893-8ba1-1b2d34c68ee2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.104158] env[62525]: DEBUG oslo_vmware.api [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for the task: (returnval){ [ 1385.104158] env[62525]: value = "task-1781040" [ 1385.104158] env[62525]: _type = "Task" [ 1385.104158] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.117026] env[62525]: DEBUG oslo_vmware.api [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1781040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.245446] env[62525]: DEBUG nova.network.neutron [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Updating instance_info_cache with network_info: [{"id": "0232b819-eeb0-4723-8935-35d0704f301a", "address": "fa:16:3e:e4:3d:66", "network": {"id": "94ad3e95-fca1-48a2-bb83-3f288ae5cf32", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-912370295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bb3230ddac441d5b85601eb9e6b5da8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0232b819-ee", "ovs_interfaceid": "0232b819-eeb0-4723-8935-35d0704f301a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.308389] env[62525]: DEBUG oslo_vmware.api [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781038, 'name': PowerOffVM_Task, 'duration_secs': 0.28342} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.308699] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.308887] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.309118] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc76ecca-77fa-4067-a27b-b8bf9f09f413 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.318772] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.023s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.319279] env[62525]: DEBUG nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1385.322036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.970s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.322231] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.324124] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.761s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.325580] env[62525]: INFO nova.compute.claims [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1385.351186] env[62525]: INFO nova.scheduler.client.report [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Deleted allocations for instance a1d1337f-3c41-4c1c-812b-aa10f2a680a8 [ 1385.390473] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.390697] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.390875] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Deleting the datastore file [datastore1] cfae9bf8-012a-4286-b978-bba8a913bba2 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.391181] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aed2aa2-68ab-4de0-8114-1ef0fead0e1d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.401532] env[62525]: DEBUG oslo_vmware.api [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 
tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for the task: (returnval){ [ 1385.401532] env[62525]: value = "task-1781042" [ 1385.401532] env[62525]: _type = "Task" [ 1385.401532] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.409576] env[62525]: DEBUG oslo_vmware.api [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781042, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.584706] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d5ef8a-b6b1-f22f-3744-fead30c49b64, 'name': SearchDatastore_Task, 'duration_secs': 0.011152} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.585083] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.585343] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1385.585581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.586083] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.586319] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1385.586580] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83081c6f-7ab7-40c8-974b-46cd4090bd18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.595207] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1385.595380] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1385.596425] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8fb6697-a166-46ac-874d-fb7099c69bf2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.602713] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1385.602713] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5255a2b6-b2c9-10f2-8466-17f2a2025fef" [ 1385.602713] env[62525]: _type = "Task" [ 1385.602713] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.615938] env[62525]: DEBUG oslo_vmware.api [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Task: {'id': task-1781040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192386} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.620179] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1385.620439] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1385.620620] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1385.620789] env[62525]: INFO nova.compute.manager [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1385.621033] env[62525]: DEBUG oslo.service.loopingcall [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1385.621243] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5255a2b6-b2c9-10f2-8466-17f2a2025fef, 'name': SearchDatastore_Task, 'duration_secs': 0.010013} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.621440] env[62525]: DEBUG nova.compute.manager [-] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1385.621566] env[62525]: DEBUG nova.network.neutron [-] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1385.624103] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d1ada50-f8cc-45e7-a23c-0418c56128e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.631055] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1385.631055] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520b56f3-e62a-03c3-050e-a90f6c238caa" [ 1385.631055] env[62525]: _type = "Task" [ 1385.631055] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.640163] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520b56f3-e62a-03c3-050e-a90f6c238caa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.694898] env[62525]: DEBUG nova.compute.manager [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Received event network-vif-plugged-0232b819-eeb0-4723-8935-35d0704f301a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.695134] env[62525]: DEBUG oslo_concurrency.lockutils [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] Acquiring lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.695354] env[62525]: DEBUG oslo_concurrency.lockutils [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.695524] env[62525]: DEBUG oslo_concurrency.lockutils [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.695690] env[62525]: DEBUG nova.compute.manager [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] No waiting events found dispatching network-vif-plugged-0232b819-eeb0-4723-8935-35d0704f301a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1385.695852] env[62525]: WARNING nova.compute.manager [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Received unexpected event network-vif-plugged-0232b819-eeb0-4723-8935-35d0704f301a for instance with vm_state building and task_state spawning. [ 1385.696016] env[62525]: DEBUG nova.compute.manager [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Received event network-changed-0232b819-eeb0-4723-8935-35d0704f301a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.696167] env[62525]: DEBUG nova.compute.manager [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Refreshing instance network info cache due to event network-changed-0232b819-eeb0-4723-8935-35d0704f301a. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1385.696331] env[62525]: DEBUG oslo_concurrency.lockutils [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] Acquiring lock "refresh_cache-f2240974-0fa4-4f59-ae0c-b9da52f9600e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.749245] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Releasing lock "refresh_cache-f2240974-0fa4-4f59-ae0c-b9da52f9600e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.749660] env[62525]: DEBUG nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Instance network_info: |[{"id": "0232b819-eeb0-4723-8935-35d0704f301a", "address": "fa:16:3e:e4:3d:66", "network": {"id": "94ad3e95-fca1-48a2-bb83-3f288ae5cf32", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-912370295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bb3230ddac441d5b85601eb9e6b5da8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0232b819-ee", "ovs_interfaceid": "0232b819-eeb0-4723-8935-35d0704f301a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1385.750041] env[62525]: DEBUG oslo_concurrency.lockutils [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] Acquired lock "refresh_cache-f2240974-0fa4-4f59-ae0c-b9da52f9600e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.750251] env[62525]: DEBUG nova.network.neutron [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Refreshing network info cache for port 0232b819-eeb0-4723-8935-35d0704f301a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1385.751780] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:3d:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '0232b819-eeb0-4723-8935-35d0704f301a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1385.760193] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Creating folder: Project (4bb3230ddac441d5b85601eb9e6b5da8). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1385.760860] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e05050fd-6664-4369-9712-a0b1c36b9a1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.773586] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Created folder: Project (4bb3230ddac441d5b85601eb9e6b5da8) in parent group-v369553. [ 1385.773586] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Creating folder: Instances. Parent ref: group-v369651. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1385.773870] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a692c4f-1411-4b4a-abde-8380f3850188 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.785245] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Created folder: Instances in parent group-v369651. [ 1385.785515] env[62525]: DEBUG oslo.service.loopingcall [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1385.785879] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1385.785957] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37df8d62-4ef0-43d9-b17a-c08b7cb343d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.806229] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1385.806229] env[62525]: value = "task-1781045" [ 1385.806229] env[62525]: _type = "Task" [ 1385.806229] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.815637] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781045, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.830975] env[62525]: DEBUG nova.compute.utils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1385.833361] env[62525]: DEBUG nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1385.833438] env[62525]: DEBUG nova.network.neutron [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1385.863043] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c89c24f-e6d1-4e54-bfa9-ec2ad3766b8c tempest-ServerExternalEventsTest-1709655928 tempest-ServerExternalEventsTest-1709655928-project-member] Lock "a1d1337f-3c41-4c1c-812b-aa10f2a680a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.690s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.900698] env[62525]: DEBUG nova.policy [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5fcdf907c7d4db6a30fb34815ca5623', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33f6a824c10047d99c8a40d21f1c7716', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1385.921333] env[62525]: DEBUG oslo_vmware.api [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Task: {'id': task-1781042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.431662} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.921678] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1385.921884] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1385.922227] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1385.923151] env[62525]: INFO nova.compute.manager [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1385.923151] env[62525]: DEBUG oslo.service.loopingcall [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1385.923151] env[62525]: DEBUG nova.compute.manager [-] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1385.923151] env[62525]: DEBUG nova.network.neutron [-] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1385.985835] env[62525]: DEBUG nova.compute.manager [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Received event network-changed-7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.986856] env[62525]: DEBUG nova.compute.manager [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Refreshing instance network info cache due to event network-changed-7f5989b4-cf62-411c-9e0e-1bcbb8f37713. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1385.986856] env[62525]: DEBUG oslo_concurrency.lockutils [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] Acquiring lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.986856] env[62525]: DEBUG oslo_concurrency.lockutils [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] Acquired lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.986856] env[62525]: DEBUG nova.network.neutron [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Refreshing network info cache for port 7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1386.142940] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520b56f3-e62a-03c3-050e-a90f6c238caa, 'name': SearchDatastore_Task, 'duration_secs': 0.010257} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.143267] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.143582] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1/9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1386.143908] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08c711cc-d845-4c03-a8f6-81f4f2251eef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.151133] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1386.151133] env[62525]: value = "task-1781046" [ 1386.151133] env[62525]: _type = "Task" [ 1386.151133] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.162105] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781046, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.317253] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781045, 'name': CreateVM_Task, 'duration_secs': 0.353115} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.317608] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1386.318704] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.319033] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.319540] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1386.319900] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f93d04d9-8871-4d2f-87c8-acdf7e01b397 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.325574] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1386.325574] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ee5fa3-07be-55f9-b245-18c546b8bb11" [ 1386.325574] env[62525]: _type = "Task" [ 1386.325574] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.334801] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ee5fa3-07be-55f9-b245-18c546b8bb11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.340278] env[62525]: DEBUG nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1386.430974] env[62525]: DEBUG nova.network.neutron [-] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.667321] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781046, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.695286] env[62525]: DEBUG nova.network.neutron [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Successfully created port: 91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1386.853034] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ee5fa3-07be-55f9-b245-18c546b8bb11, 'name': SearchDatastore_Task, 'duration_secs': 0.025543} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.853650] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.854012] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1386.854323] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.854550] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.854831] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1386.855217] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1261cfb9-992d-430c-b3ee-ad4f3c966df7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.871746] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.871746] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1386.872306] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75485e85-ce7b-441f-ac71-0d647c2ac36b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.880593] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af52d61-116e-44f6-940e-0b3109dc7c88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.887614] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1386.887614] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b89767-b3d5-8f41-e8a4-40a25f560a11" [ 1386.887614] env[62525]: _type = "Task" [ 1386.887614] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.893587] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0a03f0-adfc-4fe1-a639-e03e615e2eb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.900762] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b89767-b3d5-8f41-e8a4-40a25f560a11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.933021] env[62525]: DEBUG nova.network.neutron [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updated VIF entry in instance network info cache for port 7f5989b4-cf62-411c-9e0e-1bcbb8f37713. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1386.933518] env[62525]: DEBUG nova.network.neutron [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updating instance_info_cache with network_info: [{"id": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "address": "fa:16:3e:38:65:3a", "network": {"id": "d1a3534b-661d-45a0-a8af-3eeea0cf4cab", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1268159643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071a34f637574e2ea213429903097ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5989b4-cf", "ovs_interfaceid": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.935697] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1201eb56-8420-48b2-98ca-8c46905fcd7c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.938739] env[62525]: INFO nova.compute.manager [-] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Took 1.32 seconds to deallocate network for instance. [ 1386.945955] env[62525]: DEBUG nova.network.neutron [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Updated VIF entry in instance network info cache for port 0232b819-eeb0-4723-8935-35d0704f301a. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1386.946802] env[62525]: DEBUG nova.network.neutron [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Updating instance_info_cache with network_info: [{"id": "0232b819-eeb0-4723-8935-35d0704f301a", "address": "fa:16:3e:e4:3d:66", "network": {"id": "94ad3e95-fca1-48a2-bb83-3f288ae5cf32", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-912370295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bb3230ddac441d5b85601eb9e6b5da8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0232b819-ee", "ovs_interfaceid": "0232b819-eeb0-4723-8935-35d0704f301a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.952258] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb829ee-343c-45f0-870b-1d08e6d4461f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.974697] env[62525]: DEBUG nova.compute.provider_tree [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.038680] env[62525]: DEBUG nova.network.neutron [-] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.162652] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781046, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.922118} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.162993] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1/9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1387.163317] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1387.163799] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f46027fa-2b49-43b9-b7ff-23e3b5e1a7dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.172062] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1387.172062] env[62525]: value = "task-1781047" [ 1387.172062] env[62525]: _type = "Task" [ 1387.172062] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.180987] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781047, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.355043] env[62525]: DEBUG nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1387.390028] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1387.390341] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1387.390450] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1387.390613] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1387.390749] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1387.390891] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1387.391108] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1387.391268] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1387.391422] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1387.391581] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1387.392276] env[62525]: DEBUG nova.virt.hardware [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1387.392665] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78bca41-ec1e-45ab-bb4a-cc925bdaccdb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.406129] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2d6088-f994-42fa-bfca-a39a4ca1ac5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.409860] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b89767-b3d5-8f41-e8a4-40a25f560a11, 'name': SearchDatastore_Task, 'duration_secs': 0.049809} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.410923] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-542ed108-65b9-4b6d-b8b3-c8e8690752ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.423882] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1387.423882] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a3b4c8-5d8e-924d-743d-3f146acba7c6" [ 1387.423882] env[62525]: _type = "Task" [ 1387.423882] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.431452] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a3b4c8-5d8e-924d-743d-3f146acba7c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.440039] env[62525]: DEBUG oslo_concurrency.lockutils [req-80fb72ed-7f08-4acd-bc9d-0de802a533c0 req-e07e6a05-a4bf-4e11-ba7e-b462df7a9ee7 service nova] Releasing lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.451216] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.451660] env[62525]: DEBUG oslo_concurrency.lockutils [req-2736ebd3-7ea9-4182-b78d-4b5fefb7450f req-7f258721-948e-411b-a4c5-4672a2766366 service nova] Releasing lock "refresh_cache-f2240974-0fa4-4f59-ae0c-b9da52f9600e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.477905] env[62525]: DEBUG nova.scheduler.client.report [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.541561] env[62525]: INFO nova.compute.manager [-] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Took 1.62 seconds to deallocate network for instance. [ 1387.682730] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071288} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.682988] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1387.683851] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedd3315-2098-4abf-83bc-f490bfcda420 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.712931] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1/9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1387.713356] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3a1ca06-20f3-4bb4-a292-5d9f0cd93844 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.741309] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1387.741309] env[62525]: value = "task-1781048" [ 1387.741309] env[62525]: _type = "Task" [ 1387.741309] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.750429] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781048, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.935524] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a3b4c8-5d8e-924d-743d-3f146acba7c6, 'name': SearchDatastore_Task, 'duration_secs': 0.028016} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.936199] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.936695] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f2240974-0fa4-4f59-ae0c-b9da52f9600e/f2240974-0fa4-4f59-ae0c-b9da52f9600e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1387.939157] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d056ec22-a21f-4fc3-88f9-335b47fe6790 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.944742] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1387.944742] env[62525]: value = "task-1781049" [ 1387.944742] env[62525]: _type = "Task" [ 1387.944742] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.957940] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781049, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.982217] env[62525]: DEBUG nova.compute.manager [req-5905db37-ad55-456b-9525-d807f52cde18 req-33c7a06e-1d81-4876-a9c9-8514ac90b65a service nova] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Received event network-vif-deleted-53bbc3ac-7df3-4d0a-a947-3866d3c4460b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1387.982514] env[62525]: DEBUG nova.compute.manager [req-5905db37-ad55-456b-9525-d807f52cde18 req-33c7a06e-1d81-4876-a9c9-8514ac90b65a service nova] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Received event network-vif-deleted-9c51564d-d867-419a-93d9-ccf23c27f990 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1387.983331] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.983823] env[62525]: DEBUG nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1387.986820] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.011s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.986993] env[62525]: DEBUG nova.objects.instance [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lazy-loading 'resources' on Instance uuid c70cf2f1-77a9-4eff-981f-9d72caa82c7b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1388.052608] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.115144] env[62525]: DEBUG nova.compute.manager [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Received event network-changed-28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1388.115618] env[62525]: DEBUG nova.compute.manager [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Refreshing instance network info cache due to event network-changed-28c8b4db-8c62-4e51-a573-d0e05371bbd6. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1388.115960] env[62525]: DEBUG oslo_concurrency.lockutils [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] Acquiring lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.116229] env[62525]: DEBUG oslo_concurrency.lockutils [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] Acquired lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.116496] env[62525]: DEBUG nova.network.neutron [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Refreshing network info cache for port 28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1388.255114] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781048, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.454733] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.470387] env[62525]: DEBUG nova.network.neutron [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Successfully updated port: 91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1388.497911] env[62525]: DEBUG nova.compute.utils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1388.499442] env[62525]: DEBUG nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1388.500141] env[62525]: DEBUG nova.network.neutron [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1388.543366] env[62525]: DEBUG nova.policy [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19f9c6e6a89841c5b954d33d86c15b5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '099851107d594ed39cef954e6e6e87b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1388.759182] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781048, 'name': ReconfigVM_Task, 'duration_secs': 0.918414} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.759682] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1/9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1388.761230] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15e215c9-77da-4fea-838c-8c047d9b5aa5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.768936] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1388.768936] env[62525]: value = "task-1781050" [ 1388.768936] env[62525]: _type = "Task" [ 1388.768936] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.783889] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781050, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.961316] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781049, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.973399] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.973646] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquired lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.974331] env[62525]: DEBUG nova.network.neutron [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1389.005758] env[62525]: DEBUG nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1389.033486] env[62525]: DEBUG nova.network.neutron [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Successfully created port: 6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1389.075495] env[62525]: DEBUG nova.network.neutron [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Updated VIF entry in instance network info cache for port 28c8b4db-8c62-4e51-a573-d0e05371bbd6. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1389.075898] env[62525]: DEBUG nova.network.neutron [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Updating instance_info_cache with network_info: [{"id": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "address": "fa:16:3e:19:b8:51", "network": {"id": "3a2a43e0-d31e-47c0-9712-ccff4732a063", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-50961704-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b47806d16a8d4156922d49d6c23deb7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c8b4db-8c", "ovs_interfaceid": "28c8b4db-8c62-4e51-a573-d0e05371bbd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.095255] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fbb3e1-d0c4-4811-a905-1e687325c971 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.107196] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15733845-acf1-424f-8393-c0f13b8a4aa1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.140748] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a183e32f-148d-4338-8734-97394b198d14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.148847] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c2d414-d2af-4bf1-9105-99afa0ba4852 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.163740] env[62525]: DEBUG nova.compute.provider_tree [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.280350] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781050, 'name': Rename_Task, 'duration_secs': 0.370534} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.280884] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1389.281154] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e04848e3-41d9-431b-940f-65011935c0c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.289240] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1389.289240] env[62525]: value = "task-1781051" [ 1389.289240] env[62525]: _type = "Task" [ 1389.289240] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.298957] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781051, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.451307] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.451536] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.460618] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781049, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.058395} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.460861] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f2240974-0fa4-4f59-ae0c-b9da52f9600e/f2240974-0fa4-4f59-ae0c-b9da52f9600e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1389.463864] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1389.464243] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bc56109-66c3-4d8d-a31d-01c577d00bfb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.471315] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1389.471315] env[62525]: value = "task-1781052" [ 1389.471315] env[62525]: _type = "Task" [ 1389.471315] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.478958] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781052, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.512548] env[62525]: DEBUG nova.network.neutron [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1389.582837] env[62525]: DEBUG oslo_concurrency.lockutils [req-a1dd0df3-fdc9-4a47-9298-27b31ce10427 req-0db4ebf6-a9e9-41f7-b6ce-3bac8d0d49e2 service nova] Releasing lock "refresh_cache-cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.670298] env[62525]: DEBUG nova.scheduler.client.report [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1389.672924] env[62525]: DEBUG nova.network.neutron [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Updating instance_info_cache with network_info: [{"id": "91221eae-8243-44e9-a87d-e67faa8613b5", "address": "fa:16:3e:f4:46:e7", "network": {"id": "0a3c6e1d-9be9-42b2-a251-1ed55e4a4a6e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1079718766-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33f6a824c10047d99c8a40d21f1c7716", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91221eae-82", "ovs_interfaceid": "91221eae-8243-44e9-a87d-e67faa8613b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.800066] env[62525]: DEBUG oslo_vmware.api [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781051, 'name': PowerOnVM_Task, 'duration_secs': 0.472599} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.801888] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1389.801888] env[62525]: INFO nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Took 9.18 seconds to spawn the instance on the hypervisor. [ 1389.801888] env[62525]: DEBUG nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1389.801888] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbe54f4-bb67-4996-bce0-6c820e7a9cd1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.981863] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781052, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.24037} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.981863] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1389.982653] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7ec000-78f2-43fb-a394-349e58147046 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.005454] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] f2240974-0fa4-4f59-ae0c-b9da52f9600e/f2240974-0fa4-4f59-ae0c-b9da52f9600e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.005778] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a129cd4-0f84-4722-ae36-fb99a733af71 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.020957] env[62525]: DEBUG nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1390.030265] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1390.030265] env[62525]: value = "task-1781053" [ 1390.030265] env[62525]: _type = "Task" [ 1390.030265] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.044266] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781053, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.052549] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1390.052844] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1390.053020] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1390.053232] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1390.053345] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1390.053485] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1390.053694] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1390.053856] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1390.054031] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1390.054196] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1390.054368] env[62525]: DEBUG nova.virt.hardware [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1390.055248] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811181c3-e579-439a-9771-f2861620e766 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.063702] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687117c6-37e1-417a-af82-c2d5991106aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.176202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.179070] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Releasing lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.179235] env[62525]: DEBUG nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 
3455a540-7fbc-46ba-b7d6-84a345c0463e] Instance network_info: |[{"id": "91221eae-8243-44e9-a87d-e67faa8613b5", "address": "fa:16:3e:f4:46:e7", "network": {"id": "0a3c6e1d-9be9-42b2-a251-1ed55e4a4a6e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1079718766-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33f6a824c10047d99c8a40d21f1c7716", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91221eae-82", "ovs_interfaceid": "91221eae-8243-44e9-a87d-e67faa8613b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1390.180276] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.773s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.182032] env[62525]: INFO nova.compute.claims [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1390.185757] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:46:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91221eae-8243-44e9-a87d-e67faa8613b5', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1390.202834] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Creating folder: Project (33f6a824c10047d99c8a40d21f1c7716). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.203606] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48c1ad79-583e-4130-aec5-b997ce6524d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.211087] env[62525]: INFO nova.scheduler.client.report [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Deleted allocations for instance c70cf2f1-77a9-4eff-981f-9d72caa82c7b [ 1390.214798] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Created folder: Project (33f6a824c10047d99c8a40d21f1c7716) in parent group-v369553. [ 1390.215043] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Creating folder: Instances. Parent ref: group-v369654. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.217722] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c749d833-100d-42e7-98df-062359ebd0fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.228893] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Created folder: Instances in parent group-v369654. [ 1390.229158] env[62525]: DEBUG oslo.service.loopingcall [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1390.229352] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1390.229559] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b01137a1-246a-4ac5-9dd4-23e76457fa23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.249935] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1390.249935] env[62525]: value = "task-1781056" [ 1390.249935] env[62525]: _type = "Task" [ 1390.249935] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.258141] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781056, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.297262] env[62525]: DEBUG nova.compute.manager [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Received event network-vif-plugged-91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.297530] env[62525]: DEBUG oslo_concurrency.lockutils [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] Acquiring lock "3455a540-7fbc-46ba-b7d6-84a345c0463e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.297935] env[62525]: DEBUG oslo_concurrency.lockutils [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.297935] env[62525]: DEBUG oslo_concurrency.lockutils [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.298293] env[62525]: DEBUG nova.compute.manager [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] No waiting events found dispatching network-vif-plugged-91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1390.298458] env[62525]: WARNING nova.compute.manager [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Received unexpected event network-vif-plugged-91221eae-8243-44e9-a87d-e67faa8613b5 for instance with vm_state building and task_state spawning. [ 1390.298581] env[62525]: DEBUG nova.compute.manager [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Received event network-changed-91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.298775] env[62525]: DEBUG nova.compute.manager [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Refreshing instance network info cache due to event network-changed-91221eae-8243-44e9-a87d-e67faa8613b5. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1390.299495] env[62525]: DEBUG oslo_concurrency.lockutils [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] Acquiring lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.299495] env[62525]: DEBUG oslo_concurrency.lockutils [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] Acquired lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.299495] env[62525]: DEBUG nova.network.neutron [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Refreshing network info cache for port 91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1390.322272] env[62525]: DEBUG nova.compute.manager [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Received event network-changed-7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.322983] env[62525]: DEBUG nova.compute.manager [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Refreshing instance network info cache due to event network-changed-7f5989b4-cf62-411c-9e0e-1bcbb8f37713. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1390.323302] env[62525]: DEBUG oslo_concurrency.lockutils [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] Acquiring lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.323566] env[62525]: DEBUG oslo_concurrency.lockutils [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] Acquired lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.323827] env[62525]: DEBUG nova.network.neutron [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Refreshing network info cache for port 7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1390.326098] env[62525]: INFO nova.compute.manager [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Took 43.34 seconds to build instance. [ 1390.541067] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781053, 'name': ReconfigVM_Task, 'duration_secs': 0.284732} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.541370] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Reconfigured VM instance instance-00000022 to attach disk [datastore1] f2240974-0fa4-4f59-ae0c-b9da52f9600e/f2240974-0fa4-4f59-ae0c-b9da52f9600e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1390.542013] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4b2d19f-ff73-4919-ab8f-6f4febe39622 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.548448] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1390.548448] env[62525]: value = "task-1781057" [ 1390.548448] env[62525]: _type = "Task" [ 1390.548448] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.557388] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781057, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.569077] env[62525]: DEBUG nova.network.neutron [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Successfully updated port: 6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1390.724238] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2c3ea3-845b-4f4d-a08c-e73450ec8e2a tempest-ImagesNegativeTestJSON-497998007 tempest-ImagesNegativeTestJSON-497998007-project-member] Lock "c70cf2f1-77a9-4eff-981f-9d72caa82c7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.765s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.762867] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781056, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.828766] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b91c917-af8c-4f1c-994b-4f42e5ba8106 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.220s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.066412] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781057, 'name': Rename_Task, 'duration_secs': 0.228766} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.067016] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.067295] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60aac6cd-ce29-4770-bb96-5d8202912b2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.071781] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.071911] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.072078] env[62525]: DEBUG nova.network.neutron [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1391.074391] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1391.074391] env[62525]: value = "task-1781058" [ 1391.074391] env[62525]: _type = "Task" [ 1391.074391] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.082800] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781058, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.191679] env[62525]: DEBUG nova.network.neutron [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updated VIF entry in instance network info cache for port 7f5989b4-cf62-411c-9e0e-1bcbb8f37713. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1391.192056] env[62525]: DEBUG nova.network.neutron [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updating instance_info_cache with network_info: [{"id": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "address": "fa:16:3e:38:65:3a", "network": {"id": "d1a3534b-661d-45a0-a8af-3eeea0cf4cab", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1268159643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "071a34f637574e2ea213429903097ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5989b4-cf", "ovs_interfaceid": "7f5989b4-cf62-411c-9e0e-1bcbb8f37713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.194228] env[62525]: DEBUG nova.network.neutron [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Updated VIF entry in instance network info cache for port 91221eae-8243-44e9-a87d-e67faa8613b5. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1391.194228] env[62525]: DEBUG nova.network.neutron [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Updating instance_info_cache with network_info: [{"id": "91221eae-8243-44e9-a87d-e67faa8613b5", "address": "fa:16:3e:f4:46:e7", "network": {"id": "0a3c6e1d-9be9-42b2-a251-1ed55e4a4a6e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1079718766-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33f6a824c10047d99c8a40d21f1c7716", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91221eae-82", "ovs_interfaceid": "91221eae-8243-44e9-a87d-e67faa8613b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.262741] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781056, 'name': CreateVM_Task, 'duration_secs': 0.907437} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.263678] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1391.264388] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.264590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.264920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1391.265193] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6176f99a-0f1b-4c65-9dff-6ad236eb3c69 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.270934] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1391.270934] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a62605-238c-a668-ce01-1457e007f4da" [ 1391.270934] env[62525]: _type = "Task" [ 1391.270934] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.280525] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a62605-238c-a668-ce01-1457e007f4da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.333910] env[62525]: DEBUG nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1391.395744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "61f05e69-5e90-47da-9f47-3651b580a23c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.396013] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "61f05e69-5e90-47da-9f47-3651b580a23c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.589474] env[62525]: DEBUG oslo_vmware.api [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781058, 'name': PowerOnVM_Task, 'duration_secs': 0.493708} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.590319] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1391.590448] env[62525]: INFO nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Took 8.27 seconds to spawn the instance on the hypervisor. 
[ 1391.591330] env[62525]: DEBUG nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1391.591430] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bd9503-52ad-4540-a58f-44e08c0f0489 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.611417] env[62525]: DEBUG nova.network.neutron [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1391.693106] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f28ea34-f4c7-4d6e-bc86-5c3598117d1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.696189] env[62525]: DEBUG oslo_concurrency.lockutils [req-bb37a6b6-89f4-40c0-9290-29d9d8a38071 req-f5a76ae3-73cd-4e59-bc1d-b875ad83d982 service nova] Releasing lock "refresh_cache-e3f3fc2c-0060-4521-8aa3-da37209aee81" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.704182] env[62525]: DEBUG oslo_concurrency.lockutils [req-fc82a4e0-015c-4471-9720-13a3c72799be req-df5cc268-fd7a-441b-926f-7a12eae9ce70 service nova] Releasing lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.704182] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6bd1ba-6182-4dd8-baa2-99f52067fe39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.735525] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73452acd-5563-438f-99df-dd3992fa3726 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.743560] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3c179a-90bf-4727-af7d-8618f0489474 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.760691] env[62525]: DEBUG nova.compute.provider_tree [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1391.781915] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a62605-238c-a668-ce01-1457e007f4da, 'name': SearchDatastore_Task, 'duration_secs': 0.02471} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.782934] env[62525]: DEBUG nova.network.neutron [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updating instance_info_cache with network_info: [{"id": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "address": "fa:16:3e:5e:6d:f5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0c2886-9f", "ovs_interfaceid": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.787022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.787022] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1391.787022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.787022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.787251] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 
tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1391.787251] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1b89396-73fb-49fa-9167-2ffb925bb9b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.796175] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1391.796393] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1391.797252] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51027fb3-914f-4181-9aa0-d4d3d0d168b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.803601] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1391.803601] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5261c523-83e0-aff6-16cb-1687040dd3b1" [ 1391.803601] env[62525]: _type = "Task" [ 1391.803601] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.811759] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5261c523-83e0-aff6-16cb-1687040dd3b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.934034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.948499] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "e3f3fc2c-0060-4521-8aa3-da37209aee81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.948851] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.949096] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "e3f3fc2c-0060-4521-8aa3-da37209aee81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.949304] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.949477] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.951557] env[62525]: INFO nova.compute.manager [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Terminating instance [ 1391.954254] env[62525]: DEBUG nova.compute.manager [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: 
e3f3fc2c-0060-4521-8aa3-da37209aee81] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1391.954486] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1391.955516] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8771c37b-7270-4ad0-ba0d-0807aaa14315 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.963747] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1391.963994] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6ffa18e-807f-45f7-9278-e1f539f1a16c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.970096] env[62525]: DEBUG oslo_vmware.api [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1391.970096] env[62525]: value = "task-1781059" [ 1391.970096] env[62525]: _type = "Task" [ 1391.970096] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.981368] env[62525]: DEBUG oslo_vmware.api [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781059, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.112738] env[62525]: INFO nova.compute.manager [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Took 40.60 seconds to build instance. 
[ 1392.264680] env[62525]: DEBUG nova.scheduler.client.report [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1392.287229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.288653] env[62525]: DEBUG nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Instance network_info: |[{"id": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "address": "fa:16:3e:5e:6d:f5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0c2886-9f", "ovs_interfaceid": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1392.288791] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:6d:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1392.295718] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 
tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating folder: Project (099851107d594ed39cef954e6e6e87b1). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1392.296798] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37c6be51-3299-4f9b-8f0b-bad9c9e5d62e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.309255] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created folder: Project (099851107d594ed39cef954e6e6e87b1) in parent group-v369553. [ 1392.309427] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating folder: Instances. Parent ref: group-v369657. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1392.312979] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45cc55f2-567d-45ab-a1f1-1f6fe1c1b885 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.320094] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5261c523-83e0-aff6-16cb-1687040dd3b1, 'name': SearchDatastore_Task, 'duration_secs': 0.025047} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.320865] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e82866cd-8b24-45bb-9d48-cdc8ad8aa0d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.324756] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created folder: Instances in parent group-v369657. [ 1392.324973] env[62525]: DEBUG oslo.service.loopingcall [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1392.325512] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1392.325706] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ece7998-baa6-43e9-8a63-839fe641ca77 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.342928] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1392.342928] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527863db-ff4c-1250-0eef-36277a871467" [ 1392.342928] env[62525]: _type = "Task" [ 1392.342928] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.348755] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1392.348755] env[62525]: value = "task-1781062" [ 1392.348755] env[62525]: _type = "Task" [ 1392.348755] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.357780] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527863db-ff4c-1250-0eef-36277a871467, 'name': SearchDatastore_Task, 'duration_secs': 0.020239} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.358465] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.358739] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3455a540-7fbc-46ba-b7d6-84a345c0463e/3455a540-7fbc-46ba-b7d6-84a345c0463e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1392.359031] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fd030a3-7583-4d01-a417-3dd409bfb93c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.364130] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781062, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.369012] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1392.369012] env[62525]: value = "task-1781063" [ 1392.369012] env[62525]: _type = "Task" [ 1392.369012] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.378539] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.396441] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.480441] env[62525]: DEBUG oslo_vmware.api [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781059, 'name': PowerOffVM_Task, 'duration_secs': 0.44682} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.482265] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1392.482265] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1392.482265] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdc2f07e-f496-4c53-9b24-aab69e753de9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.556322] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1392.556571] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: 
e3f3fc2c-0060-4521-8aa3-da37209aee81] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1392.556757] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Deleting the datastore file [datastore1] e3f3fc2c-0060-4521-8aa3-da37209aee81 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1392.557034] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-088d636b-ef5f-43e1-ae29-05b16513ce98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.565447] env[62525]: DEBUG oslo_vmware.api [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for the task: (returnval){ [ 1392.565447] env[62525]: value = "task-1781065" [ 1392.565447] env[62525]: _type = "Task" [ 1392.565447] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.576502] env[62525]: DEBUG oslo_vmware.api [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781065, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.580557] env[62525]: DEBUG nova.compute.manager [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Received event network-vif-plugged-6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1392.581229] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] Acquiring lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.581791] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.582127] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.583207] env[62525]: DEBUG nova.compute.manager [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] No waiting events found 
dispatching network-vif-plugged-6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1392.583207] env[62525]: WARNING nova.compute.manager [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Received unexpected event network-vif-plugged-6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b for instance with vm_state building and task_state spawning. [ 1392.583207] env[62525]: DEBUG nova.compute.manager [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Received event network-changed-6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1392.583207] env[62525]: DEBUG nova.compute.manager [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Refreshing instance network info cache due to event network-changed-6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1392.583207] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] Acquiring lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.583475] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] Acquired lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.583562] env[62525]: DEBUG nova.network.neutron [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Refreshing network info cache for port 6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1392.615189] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c8e6bad-f4ab-474d-ad92-be5944c80b19 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.022s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.618071] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.222s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.619018] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.619285] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.619521] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.623553] env[62525]: INFO nova.compute.manager [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Terminating instance [ 1392.627784] env[62525]: DEBUG nova.compute.manager [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1392.627784] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1392.628369] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ba8ced-456f-4b09-a73b-1c512c142b72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.639289] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.639643] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2e44fd1-8ae3-459a-b30c-761b73c755ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.647122] env[62525]: DEBUG oslo_vmware.api [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1392.647122] env[62525]: value = "task-1781066" [ 1392.647122] env[62525]: _type = "Task" [ 1392.647122] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.656359] env[62525]: DEBUG oslo_vmware.api [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781066, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.776130] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.776804] env[62525]: DEBUG nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1392.780576] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.412s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.780915] env[62525]: DEBUG nova.objects.instance [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lazy-loading 'resources' on Instance uuid e34ebddc-2192-4975-81d7-0f5c200f114e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1392.862811] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781062, 'name': CreateVM_Task, 'duration_secs': 0.406039} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.863160] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1392.864064] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.864221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.864631] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1392.865406] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-564ee32a-13ad-427f-9d91-cd934ce2ca72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.871908] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1392.871908] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d418ff-f457-2e8b-2a96-00d6beae29c8" [ 1392.871908] env[62525]: _type = "Task" [ 1392.871908] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.885866] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781063, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.889433] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d418ff-f457-2e8b-2a96-00d6beae29c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.077996] env[62525]: DEBUG oslo_vmware.api [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781065, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.125213] env[62525]: DEBUG nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1393.157699] env[62525]: DEBUG oslo_vmware.api [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781066, 'name': PowerOffVM_Task, 'duration_secs': 0.291747} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.157976] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.158359] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.158642] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87e77b12-b3c9-4f1a-b1bf-a2b510406c2d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.254963] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.255259] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.255485] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Deleting the datastore file [datastore1] f2240974-0fa4-4f59-ae0c-b9da52f9600e {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.255772] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c5ea3c6-dbfb-47fd-8d17-95f27541b7ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.263027] env[62525]: DEBUG oslo_vmware.api [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for the task: (returnval){ [ 1393.263027] env[62525]: value = "task-1781068" [ 
1393.263027] env[62525]: _type = "Task" [ 1393.263027] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.271242] env[62525]: DEBUG oslo_vmware.api [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781068, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.288147] env[62525]: DEBUG nova.compute.utils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1393.294481] env[62525]: DEBUG nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1393.294481] env[62525]: DEBUG nova.network.neutron [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1393.359351] env[62525]: DEBUG nova.network.neutron [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updated VIF entry in instance network info cache for port 6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1393.359698] env[62525]: DEBUG nova.network.neutron [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updating instance_info_cache with network_info: [{"id": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "address": "fa:16:3e:5e:6d:f5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0c2886-9f", "ovs_interfaceid": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.374910] env[62525]: DEBUG nova.policy [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6f6e065dce947b2a31313b33a08132c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3900af0b29fa40beb95a4260054c8e5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1393.390246] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569334} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.396108] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3455a540-7fbc-46ba-b7d6-84a345c0463e/3455a540-7fbc-46ba-b7d6-84a345c0463e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1393.400020] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1393.400020] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d418ff-f457-2e8b-2a96-00d6beae29c8, 'name': SearchDatastore_Task, 'duration_secs': 0.050798} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.400020] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8748a6f4-3dd9-4647-b3dd-aa07cefb669d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.400681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.405082] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1393.405426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.405584] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.405801] 
env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1393.406443] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11d8c2c7-1d20-4b2f-ade7-6192e1b21d89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.415629] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1393.415629] env[62525]: value = "task-1781069" [ 1393.415629] env[62525]: _type = "Task" [ 1393.415629] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.425973] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1393.426203] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1393.429867] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74bf77ac-6ee2-4401-9a11-9ede348b4636 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.437723] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.439329] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1393.439329] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f6402f-2177-5fec-23c7-d3513bff31fc" [ 1393.439329] env[62525]: _type = "Task" [ 1393.439329] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.458440] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f6402f-2177-5fec-23c7-d3513bff31fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011342} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.459662] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56d66eb6-7657-43ec-b615-0f09818de321 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.473735] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1393.473735] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b3f3f4-ddaa-47b1-6ded-f1f7631f1e09" [ 1393.473735] env[62525]: _type = "Task" [ 1393.473735] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.489641] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b3f3f4-ddaa-47b1-6ded-f1f7631f1e09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.579158] env[62525]: DEBUG oslo_vmware.api [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Task: {'id': task-1781065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.86689} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.582316] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1393.582316] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1393.582488] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1393.582764] env[62525]: INFO nova.compute.manager [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Took 1.63 seconds to destroy the instance on the hypervisor. 
[ 1393.583196] env[62525]: DEBUG oslo.service.loopingcall [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.583634] env[62525]: DEBUG nova.compute.manager [-] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1393.583737] env[62525]: DEBUG nova.network.neutron [-] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1393.658079] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.780111] env[62525]: DEBUG oslo_vmware.api [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781068, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.801885] env[62525]: DEBUG nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1393.842774] env[62525]: DEBUG nova.network.neutron [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Successfully created port: 7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1393.866343] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa9d350c-d3f7-41ac-bcc6-25bd3c9e830c req-a5338901-fad5-46c9-a8bb-be79ab41e998 service nova] Releasing lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.911783] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6480dd29-03b6-41e9-a3d4-0f11c6725c88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.922896] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8587eb-6796-4f3c-9f43-df7a0cf46162 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.930836] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086397} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.957047] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1393.957840] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b30a163-337e-4376-9d81-d70dea99ea76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.960786] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5c1e85-b0a7-4063-9040-3530b842b320 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.979021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb25c82-3355-4475-a74c-7b9c6ebd368c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.990027] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 3455a540-7fbc-46ba-b7d6-84a345c0463e/3455a540-7fbc-46ba-b7d6-84a345c0463e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1393.993602] env[62525]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f02b89f-644d-46a4-a54e-c60773dbe482 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.023056] env[62525]: DEBUG nova.compute.provider_tree [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1394.027750] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b3f3f4-ddaa-47b1-6ded-f1f7631f1e09, 'name': SearchDatastore_Task, 'duration_secs': 0.01209} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.028418] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1394.028418] env[62525]: value = "task-1781070" [ 1394.028418] env[62525]: _type = "Task" [ 1394.028418] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.029425] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.029425] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1394.029730] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d475e1f-7bd2-44ec-97f2-337515581c98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.041154] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1394.041154] env[62525]: value = "task-1781071" [ 1394.041154] env[62525]: _type = "Task" [ 1394.041154] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.044750] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781070, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.053817] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.275303] env[62525]: DEBUG oslo_vmware.api [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Task: {'id': task-1781068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.526585} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.275303] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.275303] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.275812] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.276142] env[62525]: INFO nova.compute.manager [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1394.276442] env[62525]: DEBUG oslo.service.loopingcall [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1394.276635] env[62525]: DEBUG nova.compute.manager [-] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1394.276724] env[62525]: DEBUG nova.network.neutron [-] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1394.511566] env[62525]: DEBUG nova.network.neutron [-] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.530112] env[62525]: DEBUG nova.scheduler.client.report [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1394.544101] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781070, 'name': ReconfigVM_Task, 'duration_secs': 0.286776} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.544402] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 3455a540-7fbc-46ba-b7d6-84a345c0463e/3455a540-7fbc-46ba-b7d6-84a345c0463e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1394.545693] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7588fbb-60ab-4fb9-9b3c-ab2d1ab6782a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.557731] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781071, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.560297] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1394.560297] env[62525]: value = "task-1781072" [ 1394.560297] env[62525]: _type = "Task" [ 1394.560297] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.571659] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781072, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.634181] env[62525]: DEBUG nova.compute.manager [req-8d65d9f1-7183-4407-9427-b9640f3643b8 req-b2119b89-ec32-4e29-ae2f-a962842db6fa service nova] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Received event network-vif-deleted-7f5989b4-cf62-411c-9e0e-1bcbb8f37713 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.634456] env[62525]: DEBUG nova.compute.manager [req-8d65d9f1-7183-4407-9427-b9640f3643b8 req-b2119b89-ec32-4e29-ae2f-a962842db6fa service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Received event network-vif-deleted-0232b819-eeb0-4723-8935-35d0704f301a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.634652] env[62525]: INFO nova.compute.manager [req-8d65d9f1-7183-4407-9427-b9640f3643b8 req-b2119b89-ec32-4e29-ae2f-a962842db6fa service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Neutron deleted interface 0232b819-eeb0-4723-8935-35d0704f301a; detaching it from the instance and deleting it from the info cache [ 1394.634839] env[62525]: DEBUG nova.network.neutron [req-8d65d9f1-7183-4407-9427-b9640f3643b8 req-b2119b89-ec32-4e29-ae2f-a962842db6fa service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.814583] env[62525]: DEBUG nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1394.840426] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1394.840675] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1394.841278] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1394.841278] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1394.841278] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1394.841278] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1394.841487] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1394.841655] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1394.841809] env[62525]: DEBUG nova.virt.hardware [None 
req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1394.841964] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1394.842159] env[62525]: DEBUG nova.virt.hardware [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1394.843074] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6e7b10-b146-4430-9055-1709cf0655af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.852113] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26878d51-2bb8-4164-b122-c28311417443 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.014555] env[62525]: INFO nova.compute.manager [-] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Took 1.43 seconds to deallocate network for instance. [ 1395.038675] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.258s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.041065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.375s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.042682] env[62525]: INFO nova.compute.claims [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1395.057124] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.928542} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.057367] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1395.058859] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1395.058859] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4607a5a3-cece-46f7-8d6b-7e44714a0372 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.067067] env[62525]: DEBUG nova.network.neutron [-] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.067463] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1395.067463] env[62525]: value = "task-1781073" [ 1395.067463] env[62525]: _type = "Task" [ 1395.067463] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.069325] env[62525]: INFO nova.scheduler.client.report [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Deleted allocations for instance e34ebddc-2192-4975-81d7-0f5c200f114e [ 1395.080385] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781072, 'name': Rename_Task, 'duration_secs': 0.164156} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.081731] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1395.082041] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eac37a40-d605-4fe1-98c5-4db0a73d274e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.087221] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781073, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.092126] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1395.092126] env[62525]: value = "task-1781074" [ 1395.092126] env[62525]: _type = "Task" [ 1395.092126] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.103293] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.138573] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9eb21edb-d208-463e-9feb-429060e8cf96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.147842] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5d8ba6-6558-441f-8356-fdca1faac0d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.181768] env[62525]: DEBUG nova.compute.manager [req-8d65d9f1-7183-4407-9427-b9640f3643b8 req-b2119b89-ec32-4e29-ae2f-a962842db6fa service nova] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Detach interface failed, port_id=0232b819-eeb0-4723-8935-35d0704f301a, reason: Instance f2240974-0fa4-4f59-ae0c-b9da52f9600e could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1395.460549] env[62525]: DEBUG nova.network.neutron [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Successfully updated port: 7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1395.521833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.577201] env[62525]: INFO nova.compute.manager [-] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Took 1.30 seconds to deallocate network for instance. [ 1395.580673] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64fd2836-5153-4b40-ab10-6febbe194af1 tempest-ServerAddressesNegativeTestJSON-229864423 tempest-ServerAddressesNegativeTestJSON-229864423-project-member] Lock "e34ebddc-2192-4975-81d7-0f5c200f114e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.206s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.594667] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073145} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.598756] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1395.602872] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00751fbc-c765-409f-8389-4b6f788bf08c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.611183] env[62525]: DEBUG oslo_vmware.api [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781074, 'name': PowerOnVM_Task, 'duration_secs': 0.51526} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.623681] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.624075] env[62525]: INFO nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1395.624266] env[62525]: DEBUG nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1395.633611] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.635027] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92adba9b-8335-408f-9fac-2a303a403e2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.637702] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7d00cf8-21f4-4a57-b3bf-3568abc341b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.661601] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1395.661601] env[62525]: value = "task-1781075" [ 1395.661601] env[62525]: _type = "Task" [ 1395.661601] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.670519] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781075, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.965128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.965128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.965128] env[62525]: DEBUG nova.network.neutron [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1396.095797] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.115433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.115661] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.174509] env[62525]: INFO nova.compute.manager [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Took 41.99 seconds to build instance. [ 1396.180349] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781075, 'name': ReconfigVM_Task, 'duration_secs': 0.512416} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.180349] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Reconfigured VM instance instance-00000024 to attach disk [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1396.180741] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a675824-d486-49e7-bd45-b25fdbe53962 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.187834] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1396.187834] env[62525]: value = "task-1781076" [ 1396.187834] env[62525]: _type = "Task" [ 1396.187834] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.198605] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781076, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.388032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.388128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.513888] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ea8602-9eb7-4852-b1c0-1e328a5dbf57 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.517207] env[62525]: DEBUG nova.network.neutron [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1396.524429] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcca138-9edb-4c70-80ff-5605e1d47b97 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.559362] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334a3c53-9973-48e3-99a5-b8013855cf95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.567635] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49379c22-2bfd-4336-a317-fd8b7759195e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.581809] env[62525]: DEBUG nova.compute.provider_tree [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.682672] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8cdd69f3-d9c0-424b-b622-62b58b5bf168 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.317s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.698969] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781076, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.702047] env[62525]: DEBUG nova.compute.manager [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Received event network-vif-plugged-7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.702306] env[62525]: DEBUG oslo_concurrency.lockutils [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.702519] env[62525]: DEBUG oslo_concurrency.lockutils [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] Lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.702684] env[62525]: DEBUG oslo_concurrency.lockutils [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] Lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.702851] env[62525]: DEBUG nova.compute.manager [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] No waiting events found dispatching network-vif-plugged-7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1396.703018] env[62525]: WARNING nova.compute.manager [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Received unexpected event network-vif-plugged-7729ee20-ba8a-4607-95dd-4f5418171e89 for instance with vm_state building and task_state spawning. [ 1396.704915] env[62525]: DEBUG nova.compute.manager [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Received event network-changed-7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.704915] env[62525]: DEBUG nova.compute.manager [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Refreshing instance network info cache due to event network-changed-7729ee20-ba8a-4607-95dd-4f5418171e89. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1396.704915] env[62525]: DEBUG oslo_concurrency.lockutils [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.771100] env[62525]: DEBUG nova.network.neutron [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.086041] env[62525]: DEBUG nova.scheduler.client.report [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1397.186423] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1397.199504] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781076, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.204936] env[62525]: DEBUG nova.compute.manager [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Received event network-changed-91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.205324] env[62525]: DEBUG nova.compute.manager [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Refreshing instance network info cache due to event network-changed-91221eae-8243-44e9-a87d-e67faa8613b5. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1397.205426] env[62525]: DEBUG oslo_concurrency.lockutils [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] Acquiring lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.205547] env[62525]: DEBUG oslo_concurrency.lockutils [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] Acquired lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.205714] env[62525]: DEBUG nova.network.neutron [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Refreshing network info cache for port 91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.272963] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.273321] env[62525]: DEBUG nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Instance network_info: |[{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1397.273653] env[62525]: DEBUG oslo_concurrency.lockutils [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.273785] env[62525]: DEBUG nova.network.neutron [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Refreshing network info cache for port 7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.274924] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:d7:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7729ee20-ba8a-4607-95dd-4f5418171e89', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1397.285838] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Creating folder: Project (3900af0b29fa40beb95a4260054c8e5b). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.291027] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98292010-ddca-44bb-913b-3e6e36cdaac9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.302304] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Created folder: Project (3900af0b29fa40beb95a4260054c8e5b) in parent group-v369553. [ 1397.302492] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Creating folder: Instances. Parent ref: group-v369660. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.304617] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c434b414-7759-4d71-b92a-82d4b39bdedf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.313172] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Created folder: Instances in parent group-v369660. 
[ 1397.313431] env[62525]: DEBUG oslo.service.loopingcall [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1397.313633] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1397.313903] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6d0890e-63b1-4cfc-8309-ca852d0b2460 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.333811] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1397.333811] env[62525]: value = "task-1781079" [ 1397.333811] env[62525]: _type = "Task" [ 1397.333811] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.341837] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781079, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.595026] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.595026] env[62525]: DEBUG nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1397.596984] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.129s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.600746] env[62525]: INFO nova.compute.claims [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.702809] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781076, 'name': Rename_Task, 'duration_secs': 1.06559} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.703403] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1397.703403] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43688dce-b0c4-42f2-885c-27ab999c67c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.711577] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1397.711577] env[62525]: value = "task-1781080" [ 1397.711577] env[62525]: _type = "Task" [ 1397.711577] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.716329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.724027] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781080, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.848700] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781079, 'name': CreateVM_Task, 'duration_secs': 0.322705} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.851874] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.852098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.852534] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.853296] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1397.853915] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f996a1e-f221-4e27-b87f-d4d75958095d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.862451] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1397.862451] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529d3732-e4f8-2203-609b-ba75fd831c14" [ 1397.862451] env[62525]: _type = "Task" [ 1397.862451] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.873214] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529d3732-e4f8-2203-609b-ba75fd831c14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.104919] env[62525]: DEBUG nova.compute.utils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1398.108189] env[62525]: DEBUG nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1398.108358] env[62525]: DEBUG nova.network.neutron [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1398.180905] env[62525]: DEBUG nova.network.neutron [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Updated VIF entry in instance network info cache for port 91221eae-8243-44e9-a87d-e67faa8613b5. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.180905] env[62525]: DEBUG nova.network.neutron [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Updating instance_info_cache with network_info: [{"id": "91221eae-8243-44e9-a87d-e67faa8613b5", "address": "fa:16:3e:f4:46:e7", "network": {"id": "0a3c6e1d-9be9-42b2-a251-1ed55e4a4a6e", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1079718766-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33f6a824c10047d99c8a40d21f1c7716", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91221eae-82", "ovs_interfaceid": "91221eae-8243-44e9-a87d-e67faa8613b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.193666] env[62525]: DEBUG nova.network.neutron [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updated VIF entry in instance network info cache for port 7729ee20-ba8a-4607-95dd-4f5418171e89. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.195477] env[62525]: DEBUG nova.network.neutron [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.217413] env[62525]: DEBUG nova.policy [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19f9c6e6a89841c5b954d33d86c15b5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '099851107d594ed39cef954e6e6e87b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1398.225854] env[62525]: DEBUG oslo_vmware.api [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781080, 'name': PowerOnVM_Task, 'duration_secs': 0.497538} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.226140] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1398.226348] env[62525]: INFO nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Took 8.21 seconds to spawn the instance on the hypervisor. 
[ 1398.226527] env[62525]: DEBUG nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1398.227335] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8736f58-d6ac-418d-9be5-3c89faa309e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.374447] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529d3732-e4f8-2203-609b-ba75fd831c14, 'name': SearchDatastore_Task, 'duration_secs': 0.02813} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.374739] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.375024] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1398.375303] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.375506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.375720] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.376018] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48c6a0ba-4e05-4e4a-9b2d-0bded21f24c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.384655] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.384851] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1398.385644] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b19d1dd-881c-413a-ba7a-9da8fdf82bab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.391210] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1398.391210] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529bce44-cdc0-066c-cba9-92ab32bbf10f" [ 1398.391210] env[62525]: _type = "Task" [ 1398.391210] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.399686] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529bce44-cdc0-066c-cba9-92ab32bbf10f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.613985] env[62525]: DEBUG nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1398.683453] env[62525]: DEBUG oslo_concurrency.lockutils [req-c5226bae-258c-4110-bd09-f36df1745d08 req-f0f0819e-e04e-4c5c-ad4e-985ff13889a0 service nova] Releasing lock "refresh_cache-3455a540-7fbc-46ba-b7d6-84a345c0463e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.701060] env[62525]: DEBUG oslo_concurrency.lockutils [req-550951a4-e103-4736-a589-22830d5af76e req-285284b8-b28b-42d4-8533-8057eb0e9317 service nova] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.749101] env[62525]: INFO nova.compute.manager [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Took 42.21 seconds to build instance. 
[ 1398.876826] env[62525]: DEBUG nova.network.neutron [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Successfully created port: 988d8ed1-10c8-470a-81b1-0d6c0839c35b {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1398.906180] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529bce44-cdc0-066c-cba9-92ab32bbf10f, 'name': SearchDatastore_Task, 'duration_secs': 0.008241} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.909472] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac02d2a0-08af-4799-9cdc-df3870960e97 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.916520] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1398.916520] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c8348c-e8f6-ddd1-071b-d50216c74da3" [ 1398.916520] env[62525]: _type = "Task" [ 1398.916520] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.925897] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c8348c-e8f6-ddd1-071b-d50216c74da3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.164081] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d900a85-47e0-49e2-b57b-641a693c4c5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.173487] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c08f65f-e933-4046-94fe-d0ee2bfd15e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.207661] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec9ebbb-ef17-4907-8ec3-125a15812b2c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.216492] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba02e2d-151d-4dd7-bbf0-0a4d5a5fb7f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.231175] env[62525]: DEBUG nova.compute.provider_tree [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.252174] env[62525]: DEBUG oslo_concurrency.lockutils [None req-802da9dc-1288-4439-b621-297e946c1f27 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.879s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.425974] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c8348c-e8f6-ddd1-071b-d50216c74da3, 'name': SearchDatastore_Task, 'duration_secs': 0.009462} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.425974] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.425974] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6e9051e9-aa89-408f-8f62-533085dc1312/6e9051e9-aa89-408f-8f62-533085dc1312.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1399.425974] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25d043b2-799b-49bc-95d6-a8fc05a1ffbd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.434130] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1399.434130] env[62525]: value = "task-1781081" [ 1399.434130] env[62525]: _type = "Task" [ 1399.434130] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.443155] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.630966] env[62525]: DEBUG nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1399.655851] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1399.656179] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1399.656348] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1399.656528] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1399.656678] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1399.656824] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1399.657039] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1399.657199] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1399.657361] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1399.657520] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1399.657719] env[62525]: DEBUG nova.virt.hardware [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1399.658582] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c5fbcb-5055-494b-856e-9adf377f36c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.666657] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678fca3f-f47a-4d9a-b23c-da63b9c88185 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.734245] env[62525]: DEBUG nova.scheduler.client.report [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1399.754502] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1399.944484] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.239885] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.240539] env[62525]: DEBUG nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1400.243340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.506s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.277881] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.444473] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.470030] env[62525]: DEBUG nova.compute.manager [req-1ba7fa51-8a51-432c-98e1-967e1673e5a8 req-2abde7a9-6a34-4f1f-a534-5a5bc0a84698 service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Received event network-vif-plugged-988d8ed1-10c8-470a-81b1-0d6c0839c35b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.470122] env[62525]: DEBUG oslo_concurrency.lockutils [req-1ba7fa51-8a51-432c-98e1-967e1673e5a8 req-2abde7a9-6a34-4f1f-a534-5a5bc0a84698 service nova] Acquiring lock "2f589dc1-9244-475f-86d0-4b69b511508b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.470428] env[62525]: DEBUG oslo_concurrency.lockutils [req-1ba7fa51-8a51-432c-98e1-967e1673e5a8 req-2abde7a9-6a34-4f1f-a534-5a5bc0a84698 service nova] Lock "2f589dc1-9244-475f-86d0-4b69b511508b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.470485] env[62525]: DEBUG oslo_concurrency.lockutils [req-1ba7fa51-8a51-432c-98e1-967e1673e5a8 req-2abde7a9-6a34-4f1f-a534-5a5bc0a84698 service nova] Lock "2f589dc1-9244-475f-86d0-4b69b511508b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.471373] env[62525]: DEBUG nova.compute.manager [req-1ba7fa51-8a51-432c-98e1-967e1673e5a8 req-2abde7a9-6a34-4f1f-a534-5a5bc0a84698 service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] No waiting events found dispatching network-vif-plugged-988d8ed1-10c8-470a-81b1-0d6c0839c35b {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1400.471373] env[62525]: WARNING nova.compute.manager [req-1ba7fa51-8a51-432c-98e1-967e1673e5a8 req-2abde7a9-6a34-4f1f-a534-5a5bc0a84698 service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Received unexpected event network-vif-plugged-988d8ed1-10c8-470a-81b1-0d6c0839c35b for instance with vm_state building and task_state spawning. 
[ 1400.580201] env[62525]: DEBUG nova.network.neutron [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Successfully updated port: 988d8ed1-10c8-470a-81b1-0d6c0839c35b {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1400.715958] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643063b1-8dac-4b7e-8429-080db99eb400 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.725021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72913024-d444-40f8-81e8-aa582500ba7f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.761604] env[62525]: DEBUG nova.compute.utils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1400.763338] env[62525]: DEBUG nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1400.763517] env[62525]: DEBUG nova.network.neutron [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1400.765950] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba172fdb-f624-4deb-8278-f63d7fa019f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.775845] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5a0b7f-544a-44fc-bee0-e1dfd5e2b36e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.792190] env[62525]: DEBUG nova.compute.provider_tree [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.815225] env[62525]: DEBUG nova.policy [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fb23da45e3d414d8a1d23b8b00e0bf6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18483247e96e4263b0d32088d19debf6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1400.945200] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781081, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.384534} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.945506] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6e9051e9-aa89-408f-8f62-533085dc1312/6e9051e9-aa89-408f-8f62-533085dc1312.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1400.945737] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1400.946022] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f02c9f94-eecb-4e2f-b9f8-370793dc5db1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.953844] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1400.953844] env[62525]: value = "task-1781082" [ 1400.953844] env[62525]: _type = "Task" [ 1400.953844] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.962695] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781082, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.083169] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.083340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.083489] env[62525]: DEBUG nova.network.neutron [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1401.269728] env[62525]: DEBUG nova.network.neutron [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Successfully created port: 3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1401.271941] env[62525]: DEBUG nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1401.295470] env[62525]: DEBUG nova.scheduler.client.report [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1401.463372] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066284} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.463684] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1401.464400] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26093dd-cbdf-4043-bef1-73b4f44829e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.486862] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 6e9051e9-aa89-408f-8f62-533085dc1312/6e9051e9-aa89-408f-8f62-533085dc1312.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1401.487198] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdc0f07c-02a0-4465-93ad-b700d8634124 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.507305] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1401.507305] env[62525]: value = "task-1781083" [ 1401.507305] env[62525]: _type = "Task" [ 1401.507305] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.515270] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781083, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.614063] env[62525]: DEBUG nova.network.neutron [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1401.746144] env[62525]: DEBUG nova.network.neutron [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Updating instance_info_cache with network_info: [{"id": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "address": "fa:16:3e:8d:a5:b5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap988d8ed1-10", "ovs_interfaceid": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.800178] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.557s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.800321] env[62525]: INFO nova.compute.manager [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Successfully reverted task state from image_uploading on failure for instance. 
[ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server [None req-9289d67f-c3a9-4a12-8a7b-da2262b4e7e0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-369633' has already been deleted or has not been completely created [ 1401.805422] env[62525]: Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-369633' has already been deleted or has not been completely created' [ 1401.805422] env[62525]: Faults: [ManagedObjectNotFound] [ 1401.805422] env[62525]: Details: {'obj': 'snapshot-369633'} [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server response = request(managed_object, **kwargs) [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__ [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server return client.invoke(args, kwargs) [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server result = self.send(soapenv, timeout=timeout) [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server return self.process_reply(reply.message, None, None) [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server raise WebFault(fault, replyroot) [ 1401.805422] env[62525]: ERROR oslo_messaging.rpc.server suds.WebFault: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-369633' has already been deleted or has not been completely created' [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server return api_method(*args, **kwargs) [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server raise exceptions.VimFaultException(fault_list, fault_string, [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.VimFaultException: The object 'vim.vm.Snapshot:snapshot-369633' has already been deleted or has not been completely created [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-369633' has already been deleted or has not been completely created' [ 1401.805883] 
env[62525]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-369633'} [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1401.805883] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server raise self.value [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server raise self.value [ 1401.806340] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1401.806766] env[62525]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server raise self.value [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server raise self.value [ 1401.806766] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4443, in snapshot_instance [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4476, in _snapshot_instance [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 571, in snapshot [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1033, in snapshot [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 933, in _delete_vm_snapshot 
[ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server delete_snapshot_task = self._session._call_method( [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 127, in _call_method [ 1401.807189] env[62525]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception() as ctxt: [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server raise self.value [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 125, in _call_method [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server return self.invoke_api(module, method, *args, **kwargs) [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server return _invoke_api(module, method, *args, **kwargs) [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server idle = self.f(*self.args, **self.kw) [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func [ 1401.807583] env[62525]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1401.808038] env[62525]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api [ 1401.808038] env[62525]: ERROR oslo_messaging.rpc.server raise clazz(str(excep), [ 1401.808038] env[62525]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-369633' has already been deleted or has not been completely created [ 1401.808038] env[62525]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-369633' has already been deleted or has not been completely created' [ 1401.808038] env[62525]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 1401.808038] env[62525]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-369633'} [ 1401.808038] env[62525]: ERROR oslo_messaging.rpc.server [ 1401.808038] env[62525]: DEBUG oslo_concurrency.lockutils 
[None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.690s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.808038] env[62525]: DEBUG nova.objects.instance [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lazy-loading 'resources' on Instance uuid 84fbb408-7810-4166-a53e-242d51f60322 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1402.018060] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781083, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.248616] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.249011] env[62525]: DEBUG nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Instance network_info: |[{"id": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "address": "fa:16:3e:8d:a5:b5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap988d8ed1-10", "ovs_interfaceid": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1402.249450] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:a5:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'988d8ed1-10c8-470a-81b1-0d6c0839c35b', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1402.257211] env[62525]: DEBUG oslo.service.loopingcall [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.257424] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1402.257642] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cde6246c-e829-45f5-af17-c3056ecce973 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.280047] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1402.280047] env[62525]: value = "task-1781084" [ 1402.280047] env[62525]: _type = "Task" [ 1402.280047] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.286171] env[62525]: DEBUG nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1402.290804] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781084, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.319175] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1402.319451] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1402.319643] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1402.319860] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1402.320047] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1402.320234] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1402.320469] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1402.320652] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1402.320853] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1402.321135] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1402.321375] env[62525]: DEBUG nova.virt.hardware [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1402.322688] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed8e5d7-5e3c-41b9-a6fa-9b0bf4c350c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.330630] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0753b97f-987b-483c-bde8-be61efd115a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.498633] env[62525]: DEBUG nova.compute.manager [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Received event network-changed-988d8ed1-10c8-470a-81b1-0d6c0839c35b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1402.498725] env[62525]: DEBUG nova.compute.manager [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Refreshing instance network info cache due to event network-changed-988d8ed1-10c8-470a-81b1-0d6c0839c35b. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1402.498941] env[62525]: DEBUG oslo_concurrency.lockutils [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] Acquiring lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.499106] env[62525]: DEBUG oslo_concurrency.lockutils [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] Acquired lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.499264] env[62525]: DEBUG nova.network.neutron [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Refreshing network info cache for port 988d8ed1-10c8-470a-81b1-0d6c0839c35b {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1402.518256] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781083, 'name': ReconfigVM_Task, 'duration_secs': 0.641846} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.520808] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 6e9051e9-aa89-408f-8f62-533085dc1312/6e9051e9-aa89-408f-8f62-533085dc1312.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1402.521632] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9147661e-2131-4f68-87e2-5a507cf54d34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.528791] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1402.528791] env[62525]: value = "task-1781085" [ 1402.528791] env[62525]: _type = "Task" [ 1402.528791] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.539648] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781085, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.758074] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a69b7ec-13d2-4097-a36b-a079e5b15b0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.765843] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8747ee43-af88-4b3c-bbfd-3132eecfa18e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.808455] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a01f92-e243-47d0-825e-909c8ae21234 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.816851] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781084, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.820082] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063b75c6-a784-40aa-bc6d-e1214e0d616b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.833841] env[62525]: DEBUG nova.compute.provider_tree [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1402.891696] env[62525]: DEBUG nova.network.neutron [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Successfully updated port: 3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1403.039014] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781085, 'name': Rename_Task, 'duration_secs': 0.163078} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.042377] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1403.042672] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-641d346d-66ff-413d-ac8e-db74b58b80ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.049461] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1403.049461] env[62525]: value = "task-1781086" [ 1403.049461] env[62525]: _type = "Task" [ 1403.049461] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.056621] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781086, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.316486] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781084, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.326227] env[62525]: DEBUG nova.network.neutron [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Updated VIF entry in instance network info cache for port 988d8ed1-10c8-470a-81b1-0d6c0839c35b. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1403.326562] env[62525]: DEBUG nova.network.neutron [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Updating instance_info_cache with network_info: [{"id": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "address": "fa:16:3e:8d:a5:b5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap988d8ed1-10", "ovs_interfaceid": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.337240] env[62525]: DEBUG nova.scheduler.client.report [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1403.393897] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.394077] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquired lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.394294] env[62525]: DEBUG nova.network.neutron [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1403.559727] env[62525]: 
DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781086, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.824851] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781084, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.830112] env[62525]: DEBUG oslo_concurrency.lockutils [req-d7d278fa-a580-4711-b6b8-5bb0cb6c5883 req-f9b9d6fb-94d4-4e3d-81aa-880d38eb858f service nova] Releasing lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.842541] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.844397] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 30.071s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.844577] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.844734] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1403.845237] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.971s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.845523] env[62525]: DEBUG nova.objects.instance [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lazy-loading 'resources' on Instance uuid 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1403.847623] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e2f5fc-ac36-4712-b57f-c0c1e3cecfb9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.858980] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ac1e51-1400-4ca4-a5b4-5f8f8b1677c4 {{(pid=62525) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.873824] env[62525]: INFO nova.scheduler.client.report [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted allocations for instance 84fbb408-7810-4166-a53e-242d51f60322 [ 1403.875508] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddd6421-2b88-4a76-9d26-de6d2be94937 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.888426] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c261a6-e1dc-474e-bc20-581631a02557 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.921152] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179055MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1403.921399] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.958572] env[62525]: DEBUG nova.network.neutron [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1404.059536] env[62525]: DEBUG oslo_vmware.api [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781086, 'name': PowerOnVM_Task, 'duration_secs': 0.579402} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.059810] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1404.060019] env[62525]: INFO nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Took 9.25 seconds to spawn the instance on the hypervisor. 
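The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver kicks off an asynchronous vCenter *_Task, and wait_for_task polls the task until it reaches SUCCESS or an error state, emitting the "progress is N%" DEBUG lines along the way. A minimal sketch of that pattern, assuming the VMwareAPISession interface (invoke_api / wait_for_task) that nova.virt.vmwareapi builds on; the host, credentials and vm_ref below are placeholders, not values from this log:

# Sketch only: mirrors the invoke-then-poll task pattern visible in the log.
# VMwareAPISession, invoke_api and wait_for_task are oslo.vmware entry points;
# the connection parameters are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',           # placeholder vCenter endpoint
    api_retry_count=10, task_poll_interval=0.5)   # poll cadence behind "progress is N%"

def power_on(vm_ref):
    # Start the asynchronous vCenter task (logged as PowerOnVM_Task above) ...
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ... then block while oslo.vmware polls the task, logging
    # "Task: {...} progress is N%" until it completes or raises.
    session.wait_for_task(task_ref)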
[ 1404.060225] env[62525]: DEBUG nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1404.060975] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1626e7d4-ea1f-4a4f-ab4a-2c381d1c330f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.115253] env[62525]: DEBUG nova.network.neutron [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.319266] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781084, 'name': CreateVM_Task, 'duration_secs': 2.01763} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.319266] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1404.320334] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.321089] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.321089] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1404.321198] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60cf810f-d905-4502-9102-7c31a50bfe76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.326977] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1404.326977] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5299c8ad-4d74-20cf-5e52-b2ad07bf77ce" [ 1404.326977] env[62525]: _type = "Task" [ 1404.326977] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.334961] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5299c8ad-4d74-20cf-5e52-b2ad07bf77ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.391949] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f63ff5db-23c6-4900-9dad-ef08dbd065a2 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "84fbb408-7810-4166-a53e-242d51f60322" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.740s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.559014] env[62525]: DEBUG nova.compute.manager [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Received event network-vif-plugged-3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1404.559220] env[62525]: DEBUG oslo_concurrency.lockutils [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] Acquiring lock "d2e7c558-02af-477c-b996-239ef14ed75b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.559825] env[62525]: DEBUG oslo_concurrency.lockutils [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] Lock "d2e7c558-02af-477c-b996-239ef14ed75b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.559825] env[62525]: DEBUG oslo_concurrency.lockutils [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] Lock "d2e7c558-02af-477c-b996-239ef14ed75b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.559825] env[62525]: DEBUG nova.compute.manager [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] No waiting events found dispatching network-vif-plugged-3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1404.559978] env[62525]: WARNING nova.compute.manager [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Received unexpected event network-vif-plugged-3e9589f0-83a5-4985-ac44-61ded6abf83e for instance with vm_state building and task_state spawning. [ 1404.560036] env[62525]: DEBUG nova.compute.manager [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Received event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1404.560775] env[62525]: DEBUG nova.compute.manager [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing instance network info cache due to event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1404.560775] env[62525]: DEBUG oslo_concurrency.lockutils [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] Acquiring lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.580346] env[62525]: INFO nova.compute.manager [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Took 43.20 seconds to build instance. [ 1404.618326] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Releasing lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.618638] env[62525]: DEBUG nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Instance network_info: |[{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1404.619197] env[62525]: DEBUG oslo_concurrency.lockutils [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] Acquired lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.619380] env[62525]: DEBUG nova.network.neutron [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.620558] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 
tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:dd:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60bdba1a-14cf-46b2-9d8b-aeaf4d80c815', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e9589f0-83a5-4985-ac44-61ded6abf83e', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1404.628358] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Creating folder: Project (18483247e96e4263b0d32088d19debf6). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.629715] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5cbc5cc1-1540-4884-a6ec-ca4ac2f63f76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.644296] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Created folder: Project (18483247e96e4263b0d32088d19debf6) in parent group-v369553. [ 1404.644491] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Creating folder: Instances. Parent ref: group-v369664. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.646860] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-413d144e-3f59-4e9a-959e-7c3bd18a9bf2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.657144] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Created folder: Instances in parent group-v369664. [ 1404.658903] env[62525]: DEBUG oslo.service.loopingcall [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.658903] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1404.658903] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb1e7509-966c-4cf7-ba04-355dc56fab54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.681622] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1404.681622] env[62525]: value = "task-1781089" [ 1404.681622] env[62525]: _type = "Task" [ 1404.681622] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.690346] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781089, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.815820] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8b2674-a55e-49ac-9fcf-8c2e3385b116 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.823648] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46c1028-d2bd-473d-ad8a-7cd8367a3a19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.836297] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5299c8ad-4d74-20cf-5e52-b2ad07bf77ce, 'name': SearchDatastore_Task, 'duration_secs': 0.010341} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.863336] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.863606] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.863954] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.864047] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.864215] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.864658] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f43c83b2-d0be-4132-aded-e9d5bb8ab1cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.867541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76910a5c-cff4-4922-a77d-b37edd12cb34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.875298] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345f1fd2-de3a-419c-9d64-206985e10f41 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.880349] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.881075] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.881629] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daaad83f-d238-469a-afb3-650fa7b52dc3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.893689] env[62525]: DEBUG nova.compute.provider_tree [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1404.897977] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1404.897977] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52095e5c-a3ed-6b07-18dd-a3531abe7023" [ 1404.897977] env[62525]: _type = "Task" [ 1404.897977] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.907169] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52095e5c-a3ed-6b07-18dd-a3531abe7023, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.082838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-fde93ce3-bc15-442c-81fb-0d7f515d7b72 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.389s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.190215] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781089, 'name': CreateVM_Task, 'duration_secs': 0.34635} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.190390] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1405.191059] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.191233] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.191556] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1405.191793] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de664784-6959-4711-9d2f-bc991d086c24 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.196090] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1405.196090] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522d9b3c-39c9-4341-c4c9-dca820b5643e" [ 1405.196090] env[62525]: _type = "Task" [ 1405.196090] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.203883] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522d9b3c-39c9-4341-c4c9-dca820b5643e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.397390] env[62525]: DEBUG nova.scheduler.client.report [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1405.411434] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52095e5c-a3ed-6b07-18dd-a3531abe7023, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.412922] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d528d721-a68e-4960-ae9e-a8580999e722 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.419147] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1405.419147] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523b12e2-b7bf-e8ea-86e0-a8931f5fddfb" [ 1405.419147] env[62525]: _type = "Task" [ 1405.419147] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.427193] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b12e2-b7bf-e8ea-86e0-a8931f5fddfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.588314] env[62525]: DEBUG nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1405.708617] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522d9b3c-39c9-4341-c4c9-dca820b5643e, 'name': SearchDatastore_Task, 'duration_secs': 0.015681} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.708975] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.709235] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1405.709445] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.744105] env[62525]: DEBUG nova.network.neutron [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updated VIF entry in instance network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1405.744495] env[62525]: DEBUG nova.network.neutron [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.906340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.061s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.910014] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.271s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.910345] env[62525]: DEBUG nova.objects.instance [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lazy-loading 'resources' on Instance uuid 35a2e221-e1c5-49d9-af93-5e5f28c62b8f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.930416] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b12e2-b7bf-e8ea-86e0-a8931f5fddfb, 'name': SearchDatastore_Task, 'duration_secs': 0.010838} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.931465] env[62525]: INFO nova.scheduler.client.report [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Deleted allocations for instance 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b [ 1405.932372] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.932804] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/2f589dc1-9244-475f-86d0-4b69b511508b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1405.932911] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.933089] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1405.933295] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-111a0ea2-6b87-4bfa-b003-edc4c947d7c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.940905] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6e3aeeb-b7e8-4009-9975-a54f5ab9f00d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.948871] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1405.948871] env[62525]: value = "task-1781090" [ 1405.948871] env[62525]: _type = "Task" [ 1405.948871] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.954299] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1405.954559] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1405.955783] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fed5bed6-de4b-45b3-9b34-deb7e2c18703 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.964300] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.967646] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1405.967646] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ea5e8b-c888-924b-0976-0bcf61e231db" [ 1405.967646] env[62525]: _type = "Task" [ 1405.967646] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.977627] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ea5e8b-c888-924b-0976-0bcf61e231db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.110150] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.247861] env[62525]: DEBUG oslo_concurrency.lockutils [req-62817cd5-161d-49c5-958b-016fff73b951 req-e0b0c4e3-42d5-4bf9-bc46-0409b5799935 service nova] Releasing lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.451069] env[62525]: DEBUG oslo_concurrency.lockutils [None req-419641ee-0436-427e-82ed-ce24ac9ae469 tempest-ServerTagsTestJSON-1878572358 tempest-ServerTagsTestJSON-1878572358-project-member] Lock "2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.355s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.463952] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781090, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.480083] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ea5e8b-c888-924b-0976-0bcf61e231db, 'name': SearchDatastore_Task, 'duration_secs': 0.010065} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.481123] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4afb832-5dbc-448b-a4ff-73b02a0c0c30 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.492142] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1406.492142] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c7a28c-b4f3-edd4-4867-ad8923b25a7c" [ 1406.492142] env[62525]: _type = "Task" [ 1406.492142] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.503194] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c7a28c-b4f3-edd4-4867-ad8923b25a7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.592565] env[62525]: DEBUG nova.compute.manager [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Received event network-changed-7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1406.592732] env[62525]: DEBUG nova.compute.manager [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Refreshing instance network info cache due to event network-changed-7729ee20-ba8a-4607-95dd-4f5418171e89. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1406.592957] env[62525]: DEBUG oslo_concurrency.lockutils [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.593116] env[62525]: DEBUG oslo_concurrency.lockutils [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.593276] env[62525]: DEBUG nova.network.neutron [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Refreshing network info cache for port 7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1406.909591] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5d7990-7153-4dee-a2a2-1c6ac42b06ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.920254] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd673568-da18-4445-9a33-4a5ba9b6333b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.968921] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e13c8b-0bf2-43ef-a00b-682209e8cb8b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.979243] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684507} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.981915] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/2f589dc1-9244-475f-86d0-4b69b511508b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1406.982258] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1406.982617] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95268e03-5d1f-4a24-8459-7507df333a3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.985919] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8360d69d-34f4-471a-b2a4-346788f7ccd3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.001120] env[62525]: DEBUG nova.compute.provider_tree [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1407.006857] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1407.006857] env[62525]: value = "task-1781091" [ 1407.006857] env[62525]: _type = "Task" [ 1407.006857] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.015694] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c7a28c-b4f3-edd4-4867-ad8923b25a7c, 'name': SearchDatastore_Task, 'duration_secs': 0.066394} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.016145] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.016662] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] d2e7c558-02af-477c-b996-239ef14ed75b/d2e7c558-02af-477c-b996-239ef14ed75b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.016662] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4c31031-5876-48ee-aa97-d1eac3fb3df6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.021935] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781091, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.026994] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1407.026994] env[62525]: value = "task-1781092" [ 1407.026994] env[62525]: _type = "Task" [ 1407.026994] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.035986] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.487495] env[62525]: DEBUG nova.network.neutron [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updated VIF entry in instance network info cache for port 7729ee20-ba8a-4607-95dd-4f5418171e89. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1407.487920] env[62525]: DEBUG nova.network.neutron [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.508276] env[62525]: DEBUG nova.scheduler.client.report [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1407.523129] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781091, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.220134} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.523409] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1407.524265] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2b6118-836d-4122-ac3c-a418917567ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.549228] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/2f589dc1-9244-475f-86d0-4b69b511508b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1407.550301] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-330fe0b4-53cf-410c-849d-e5dd5072c640 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.569249] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.574592] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1407.574592] env[62525]: value = "task-1781093" [ 1407.574592] env[62525]: _type = "Task" [ 1407.574592] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.586835] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781093, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.991235] env[62525]: DEBUG oslo_concurrency.lockutils [req-34ae8255-4bb9-4d3e-b834-b262f40008cf req-c579b9cd-05fd-4851-8a30-6ca1f84c5bd7 service nova] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.017851] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.021052] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.220s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.023451] env[62525]: INFO nova.compute.claims [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.041934] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781092, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.053090] env[62525]: INFO nova.scheduler.client.report [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Deleted allocations for instance 35a2e221-e1c5-49d9-af93-5e5f28c62b8f [ 1408.089429] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781093, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.541585] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781092, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.18301} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.542509] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] d2e7c558-02af-477c-b996-239ef14ed75b/d2e7c558-02af-477c-b996-239ef14ed75b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1408.542752] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1408.543052] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71e43379-5634-4dc8-8109-127b698be77d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.550039] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1408.550039] env[62525]: value = "task-1781094" [ 1408.550039] env[62525]: _type = "Task" [ 1408.550039] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.562124] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781094, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.565464] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db2591e8-cd09-4c55-b1f5-2677200e78ad tempest-ImagesOneServerTestJSON-1762267398 tempest-ImagesOneServerTestJSON-1762267398-project-member] Lock "35a2e221-e1c5-49d9-af93-5e5f28c62b8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.104s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.587201] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781093, 'name': ReconfigVM_Task, 'duration_secs': 0.826119} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.587439] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/2f589dc1-9244-475f-86d0-4b69b511508b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1408.588117] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c42b1c1c-96af-4ea9-9840-00ec15335966 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.594061] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1408.594061] env[62525]: value = "task-1781095" [ 1408.594061] env[62525]: _type = "Task" [ 1408.594061] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.602655] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781095, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.062962] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139425} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.062962] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1409.062962] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26654219-c42e-4079-80cf-bfb9d52e0d18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.067994] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "aa639aa3-d21c-4923-bc39-56e648c566fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.070482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.070482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "aa639aa3-d21c-4923-bc39-56e648c566fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.070482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.070482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.071589] env[62525]: INFO nova.compute.manager [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Terminating instance [ 1409.092395] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] d2e7c558-02af-477c-b996-239ef14ed75b/d2e7c558-02af-477c-b996-239ef14ed75b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.095406] env[62525]: DEBUG nova.compute.manager [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1409.097383] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1409.097383] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea3e6d69-99fc-4c08-bed2-8cd7ebef22fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.111221] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235c5245-cada-41e2-99a8-1abe1ac94c9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.122465] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781095, 'name': Rename_Task, 'duration_secs': 0.164387} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.128040] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1409.128235] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1409.128235] env[62525]: value = "task-1781096" [ 1409.128235] env[62525]: _type = "Task" [ 1409.128235] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.128347] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1409.129028] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d98b171f-6a1e-4816-aeae-d3d2308830d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.130463] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f15fd2f-e918-412b-bf67-14ad10763a2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.140783] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781096, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.145180] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1409.145180] env[62525]: value = "task-1781097" [ 1409.145180] env[62525]: _type = "Task" [ 1409.145180] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.145683] env[62525]: DEBUG oslo_vmware.api [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1409.145683] env[62525]: value = "task-1781098" [ 1409.145683] env[62525]: _type = "Task" [ 1409.145683] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.159277] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781097, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.163580] env[62525]: DEBUG oslo_vmware.api [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781098, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.544328] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a724aeca-9db7-4e81-a635-3a978d51191c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.552889] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a07616-10c7-4e12-a181-302d67b551eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.586166] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5904cd-a770-4421-8716-0f19b7b21aaf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.594338] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e13040-8480-4fff-8ca8-e086a979ad30 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.609345] env[62525]: DEBUG nova.compute.provider_tree [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1409.640237] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781096, 'name': ReconfigVM_Task, 'duration_secs': 0.387945} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.640529] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Reconfigured VM instance instance-00000027 to attach disk [datastore1] d2e7c558-02af-477c-b996-239ef14ed75b/d2e7c558-02af-477c-b996-239ef14ed75b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.641285] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2110d73-392d-4d67-bf65-2831dc76f1f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.654513] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1409.654513] env[62525]: value = "task-1781099" [ 1409.654513] env[62525]: _type = "Task" [ 1409.654513] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.665723] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781097, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.665844] env[62525]: DEBUG oslo_vmware.api [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781098, 'name': PowerOffVM_Task, 'duration_secs': 0.216423} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.668479] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1409.668479] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1409.674540] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fe21082-fb8b-4909-af58-589aba6727e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.676303] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.676572] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.676836] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781099, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.760967] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1409.761246] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1409.761431] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Deleting the datastore file [datastore1] aa639aa3-d21c-4923-bc39-56e648c566fb {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1409.761732] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-017fa93b-3087-4a13-b432-df97889dba93 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.768688] env[62525]: DEBUG oslo_vmware.api [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1409.768688] env[62525]: value = "task-1781101" [ 1409.768688] env[62525]: _type = "Task" [ 1409.768688] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.780366] env[62525]: DEBUG oslo_vmware.api [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781101, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.136434] env[62525]: ERROR nova.scheduler.client.report [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [req-9fd23264-f647-4748-914b-45c5b9da1219] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9fd23264-f647-4748-914b-45c5b9da1219"}]} [ 1410.153734] env[62525]: DEBUG nova.scheduler.client.report [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1410.166505] env[62525]: DEBUG oslo_vmware.api [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781097, 'name': PowerOnVM_Task, 'duration_secs': 0.557194} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.166505] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1410.166505] env[62525]: INFO nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Took 10.53 seconds to spawn the instance on the hypervisor. [ 1410.166505] env[62525]: DEBUG nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1410.167246] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd67227-e67a-49fc-a42a-ec4227cea9ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.174249] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781099, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.175350] env[62525]: DEBUG nova.scheduler.client.report [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1410.175473] env[62525]: DEBUG nova.compute.provider_tree [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1410.193435] env[62525]: DEBUG nova.scheduler.client.report [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1410.212590] env[62525]: DEBUG nova.scheduler.client.report [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1410.279850] env[62525]: DEBUG oslo_vmware.api [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453175} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.282516] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1410.282713] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1410.282886] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1410.283072] env[62525]: INFO nova.compute.manager [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1410.283448] env[62525]: DEBUG oslo.service.loopingcall [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1410.283813] env[62525]: DEBUG nova.compute.manager [-] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1410.283911] env[62525]: DEBUG nova.network.neutron [-] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1410.669817] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781099, 'name': Rename_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.702528] env[62525]: INFO nova.compute.manager [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Took 46.06 seconds to build instance. 
[ 1410.810757] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fd4749-51b2-48f2-948a-a6803aeda85d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.818276] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a424becc-534c-4ef3-bf0e-ff609e6132a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.848528] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0581ed02-167b-4849-8bc2-9cbbfe3761f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.856488] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd080c46-f892-44a8-a96a-c30fca55c062 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.872117] env[62525]: DEBUG nova.compute.provider_tree [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1411.044867] env[62525]: DEBUG nova.compute.manager [req-d8e26068-00eb-49b1-9879-09b7091fdb6c req-22e29189-6072-4c3d-8430-8923c05c1e9a service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Received event network-vif-deleted-682b8bd4-d21c-41b2-a9ed-2eae30b329e0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1411.045470] env[62525]: INFO nova.compute.manager [req-d8e26068-00eb-49b1-9879-09b7091fdb6c req-22e29189-6072-4c3d-8430-8923c05c1e9a service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Neutron deleted interface 682b8bd4-d21c-41b2-a9ed-2eae30b329e0; detaching it from the instance and deleting it from the info cache [ 1411.045470] env[62525]: DEBUG nova.network.neutron [req-d8e26068-00eb-49b1-9879-09b7091fdb6c req-22e29189-6072-4c3d-8430-8923c05c1e9a service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.170623] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781099, 'name': Rename_Task, 'duration_secs': 1.128461} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.170927] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1411.172438] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7a40b91-aa7f-4594-ac2c-5f4655e03975 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.177427] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1411.177427] env[62525]: value = "task-1781102" [ 1411.177427] env[62525]: _type = "Task" [ 1411.177427] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.186161] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781102, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.205118] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23ac8dbd-faea-4340-8105-3a04ea053485 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "2f589dc1-9244-475f-86d0-4b69b511508b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.934s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.409575] env[62525]: DEBUG nova.scheduler.client.report [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1411.409840] env[62525]: DEBUG nova.compute.provider_tree [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 61 to 62 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1411.410029] env[62525]: DEBUG nova.compute.provider_tree [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 
with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1411.524512] env[62525]: DEBUG nova.network.neutron [-] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.549266] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3965c66b-936b-435d-8aa5-8bc4d7e00f0f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.558657] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf9c260-a188-4cdc-b3db-5499a50924de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.593109] env[62525]: DEBUG nova.compute.manager [req-d8e26068-00eb-49b1-9879-09b7091fdb6c req-22e29189-6072-4c3d-8430-8923c05c1e9a service nova] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Detach interface failed, port_id=682b8bd4-d21c-41b2-a9ed-2eae30b329e0, reason: Instance aa639aa3-d21c-4923-bc39-56e648c566fb could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1411.692527] env[62525]: DEBUG oslo_vmware.api [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781102, 'name': PowerOnVM_Task, 'duration_secs': 0.485192} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.693559] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1411.693559] env[62525]: INFO nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Took 9.41 seconds to spawn the instance on the hypervisor. 
[ 1411.693559] env[62525]: DEBUG nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1411.694409] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda8b430-5663-43d8-a1c7-bae93caf512f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.707600] env[62525]: DEBUG nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1411.916063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.895s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.916711] env[62525]: DEBUG nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1411.919921] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.486s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.920337] env[62525]: DEBUG nova.objects.instance [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lazy-loading 'resources' on Instance uuid 7f8392fa-1c11-4180-bda9-057b5cfa058c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1412.028297] env[62525]: INFO nova.compute.manager [-] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Took 1.74 seconds to deallocate network for instance. [ 1412.214159] env[62525]: INFO nova.compute.manager [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Took 44.76 seconds to build instance. 
[ 1412.237346] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.239947] env[62525]: INFO nova.compute.manager [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Rescuing [ 1412.240278] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.240485] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.240683] env[62525]: DEBUG nova.network.neutron [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.424731] env[62525]: DEBUG nova.compute.utils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1412.429710] env[62525]: DEBUG nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1412.429894] env[62525]: DEBUG nova.network.neutron [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1412.497779] env[62525]: DEBUG nova.policy [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fba561c0a38b455b9df1afe382dd26cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d90844079ec456b8ea730ac1c348b25', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1412.537027] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.718976] env[62525]: DEBUG oslo_concurrency.lockutils [None req-342bbb8b-cfa8-4285-a64d-62deef055776 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "d2e7c558-02af-477c-b996-239ef14ed75b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.899s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.932842] env[62525]: DEBUG nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1412.936307] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadaee1d-6ba0-45e7-b9b7-9800f2f83cc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.947470] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b8e9ec-2a64-441a-8f1f-ec9d64bccbeb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.990240] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7d8331-5aa8-4947-839b-ccccd70f6947 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.000119] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcefe321-784c-4ab7-afcc-113c3ed69933 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.012537] env[62525]: DEBUG nova.compute.provider_tree [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.176864] env[62525]: DEBUG nova.network.neutron [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Updating instance_info_cache with network_info: [{"id": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "address": "fa:16:3e:8d:a5:b5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap988d8ed1-10", "ovs_interfaceid": "988d8ed1-10c8-470a-81b1-0d6c0839c35b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.221229] env[62525]: DEBUG nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1413.351128] env[62525]: DEBUG nova.network.neutron [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Successfully created port: 3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.470219] env[62525]: DEBUG nova.compute.manager [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Received event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1413.470219] env[62525]: DEBUG nova.compute.manager [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing instance network info cache due to event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1413.470219] env[62525]: DEBUG oslo_concurrency.lockutils [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] Acquiring lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.470219] env[62525]: DEBUG oslo_concurrency.lockutils [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] Acquired lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.470219] env[62525]: DEBUG nova.network.neutron [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1413.522241] env[62525]: DEBUG nova.scheduler.client.report [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1413.679933] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-2f589dc1-9244-475f-86d0-4b69b511508b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.748107] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 
tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.948964] env[62525]: DEBUG nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1413.980765] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1413.981156] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1413.981327] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1413.981565] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1413.981757] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1413.982496] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1413.982496] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1413.982496] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1413.982655] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1413.982687] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1413.984123] env[62525]: DEBUG nova.virt.hardware [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1413.984123] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c19fe07-e938-47be-a3ab-d0910bc79924 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.993164] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfba10c3-2850-4602-b8b5-73e527f41cf3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.027230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.029733] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.395s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.030031] env[62525]: DEBUG nova.objects.instance [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lazy-loading 'resources' on Instance uuid 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1414.053240] env[62525]: INFO nova.scheduler.client.report [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 tempest-ServerShowV247Test-776421179-project-member] Deleted allocations for instance 7f8392fa-1c11-4180-bda9-057b5cfa058c [ 1414.222024] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 
tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1414.222288] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8feb1dd7-7cd5-44c0-85c4-5dcd95c457f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.233319] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1414.233319] env[62525]: value = "task-1781103" [ 1414.233319] env[62525]: _type = "Task" [ 1414.233319] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.243409] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781103, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.270194] env[62525]: DEBUG nova.network.neutron [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updated VIF entry in instance network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1414.270585] env[62525]: DEBUG nova.network.neutron [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.566158] env[62525]: DEBUG oslo_concurrency.lockutils [None req-acc89442-a4b7-408d-b28f-00ba17c12623 tempest-ServerShowV247Test-776421179 
tempest-ServerShowV247Test-776421179-project-member] Lock "7f8392fa-1c11-4180-bda9-057b5cfa058c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.908s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.745461] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781103, 'name': PowerOffVM_Task, 'duration_secs': 0.209332} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.748444] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1414.749624] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc96a57e-2476-4ca8-adf2-cb8beea6af13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.772490] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568ec932-f4e5-4317-8ba8-2f0653d1883b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.775817] env[62525]: DEBUG oslo_concurrency.lockutils [req-30b161e2-c1ce-47f7-964f-d0100dc4f435 req-6f644134-5ec8-487e-824f-699dc42852c7 service nova] Releasing lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.812076] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1414.813227] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae5bcac1-8461-40eb-9cbc-ce8b03bbc2b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.823120] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1414.823120] env[62525]: value = "task-1781104" [ 1414.823120] env[62525]: _type = "Task" [ 1414.823120] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.835614] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1414.835908] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1414.836266] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.836460] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.836669] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1414.837426] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-836f5449-7686-4ca4-afa5-a6e122d23245 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.846880] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1414.847128] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1414.848127] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f916fc85-46ea-4cc4-abb5-7a6a09ebf24a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.860443] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1414.860443] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5251935d-8ede-b3c8-fa78-e17b7b120991" [ 1414.860443] env[62525]: _type = "Task" [ 1414.860443] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.870058] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5251935d-8ede-b3c8-fa78-e17b7b120991, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.061252] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d32050-4281-4120-a41c-5a7a54a90071 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.071469] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25573ab5-e0f2-4e82-aecc-4473a42fd43c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.108329] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414b5ab7-bfd9-4d52-9cd0-5caeb2d9c91c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.117995] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccf5a0c-9ab8-4306-8aa8-2556d4c8e8af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.137471] env[62525]: DEBUG nova.compute.provider_tree [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1415.372708] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5251935d-8ede-b3c8-fa78-e17b7b120991, 'name': SearchDatastore_Task, 'duration_secs': 0.010339} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.373841] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0ce0153-b3ff-4e3e-b15e-e3c75f6e9dd4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.380533] env[62525]: DEBUG nova.compute.manager [req-80e5441f-9a36-43f1-9ee1-2c1122d9be10 req-6470c7f1-62d3-4001-a7d8-1df609002f0d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Received event network-vif-plugged-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1415.380760] env[62525]: DEBUG oslo_concurrency.lockutils [req-80e5441f-9a36-43f1-9ee1-2c1122d9be10 req-6470c7f1-62d3-4001-a7d8-1df609002f0d service nova] Acquiring lock "1f3792c0-9f86-4d76-a1a6-28d492869046-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.380969] env[62525]: DEBUG oslo_concurrency.lockutils [req-80e5441f-9a36-43f1-9ee1-2c1122d9be10 req-6470c7f1-62d3-4001-a7d8-1df609002f0d service nova] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.381205] env[62525]: DEBUG oslo_concurrency.lockutils [req-80e5441f-9a36-43f1-9ee1-2c1122d9be10 req-6470c7f1-62d3-4001-a7d8-1df609002f0d service nova] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.381343] env[62525]: DEBUG nova.compute.manager [req-80e5441f-9a36-43f1-9ee1-2c1122d9be10 req-6470c7f1-62d3-4001-a7d8-1df609002f0d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] No waiting events found dispatching network-vif-plugged-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1415.381516] env[62525]: WARNING nova.compute.manager [req-80e5441f-9a36-43f1-9ee1-2c1122d9be10 req-6470c7f1-62d3-4001-a7d8-1df609002f0d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Received unexpected event network-vif-plugged-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 for instance with vm_state building and task_state spawning. [ 1415.383078] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1415.383078] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525a9125-5243-5528-7a45-d2c543d23e85" [ 1415.383078] env[62525]: _type = "Task" [ 1415.383078] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.392849] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]525a9125-5243-5528-7a45-d2c543d23e85, 'name': SearchDatastore_Task, 'duration_secs': 0.009403} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.393101] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.393358] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. {{(pid=62525) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1415.393599] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e10e3bf3-8c0c-435e-a528-04f8d74553d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.400422] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1415.400422] env[62525]: value = "task-1781105" [ 1415.400422] env[62525]: _type = "Task" [ 1415.400422] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.411546] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781105, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.482328] env[62525]: DEBUG nova.network.neutron [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Successfully updated port: 3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1415.645783] env[62525]: DEBUG nova.scheduler.client.report [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1415.768318] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "d8c7d102-46e6-40fe-a864-a72590af4982" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.768790] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "d8c7d102-46e6-40fe-a864-a72590af4982" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.911696] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781105, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.985536] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.985687] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquired lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.985841] env[62525]: DEBUG nova.network.neutron [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.154031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.124s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.156422] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.289s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.156689] env[62525]: DEBUG nova.objects.instance [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lazy-loading 'resources' on Instance uuid 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1416.174846] env[62525]: INFO nova.scheduler.client.report [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Deleted allocations for instance 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07 [ 1416.411537] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781105, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.519566] env[62525]: DEBUG nova.network.neutron [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1416.683475] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4d8eadc6-2a70-4fb0-b425-76f55683d08f tempest-FloatingIPsAssociationTestJSON-1438981390 tempest-FloatingIPsAssociationTestJSON-1438981390-project-member] Lock "5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.288s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.692304] env[62525]: DEBUG nova.network.neutron [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Updating instance_info_cache with network_info: [{"id": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "address": "fa:16:3e:39:b3:2b", "network": {"id": "ea70d7a5-7017-4596-b532-b0b9e7f66a64", "bridge": "br-int", "label": "tempest-ServersTestJSON-480029300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d90844079ec456b8ea730ac1c348b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc988c5-01", "ovs_interfaceid": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.912047] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781105, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.324258} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.912252] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. 
[ 1416.913028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bdc90b-6a6d-4c51-b05c-39484bfc1b40 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.938780] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1416.940888] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0d56cfe-d15d-4ee4-9a6c-6a7d320a625b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.958617] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1416.958617] env[62525]: value = "task-1781106" [ 1416.958617] env[62525]: _type = "Task" [ 1416.958617] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.968247] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781106, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.097488] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f70171-43aa-4915-ab7f-2bce45508855 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.104777] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c263564c-84b1-46ac-a9c0-a6f59b2a40a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.136410] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f27de2-fe75-4e1f-adbd-f82f04b88612 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.144299] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80a2850-1ec4-4e80-a3aa-46926cacc0ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.157669] env[62525]: DEBUG nova.compute.provider_tree [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.194918] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Releasing lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.195243] env[62525]: DEBUG nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Instance network_info: |[{"id": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "address": "fa:16:3e:39:b3:2b", "network": {"id": "ea70d7a5-7017-4596-b532-b0b9e7f66a64", "bridge": "br-int", "label": "tempest-ServersTestJSON-480029300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d90844079ec456b8ea730ac1c348b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc988c5-01", "ovs_interfaceid": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1417.195632] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None 
req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:b3:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dc988c5-019e-4c2d-bd0f-5e15f1e00e11', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1417.203336] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Creating folder: Project (7d90844079ec456b8ea730ac1c348b25). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.203889] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1ff3db4-1e9a-4299-b5ea-f96a3906d828 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.214348] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Created folder: Project (7d90844079ec456b8ea730ac1c348b25) in parent group-v369553. [ 1417.214554] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Creating folder: Instances. Parent ref: group-v369667. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.214777] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-810dc04b-8c78-4160-bb3f-cfcfcd6554f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.223584] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Created folder: Instances in parent group-v369667. [ 1417.223802] env[62525]: DEBUG oslo.service.loopingcall [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1417.223977] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1417.224177] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4c8c00d-99c4-4db4-a608-2c373ac790ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.243406] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1417.243406] env[62525]: value = "task-1781109" [ 1417.243406] env[62525]: _type = "Task" [ 1417.243406] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.251198] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781109, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.410197] env[62525]: DEBUG nova.compute.manager [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Received event network-changed-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1417.410369] env[62525]: DEBUG nova.compute.manager [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Refreshing instance network info cache due to event network-changed-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1417.410584] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] Acquiring lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.410731] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] Acquired lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.410911] env[62525]: DEBUG nova.network.neutron [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Refreshing network info cache for port 3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1417.473513] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781106, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.661253] env[62525]: DEBUG nova.scheduler.client.report [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1417.752799] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781109, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.971548] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781106, 'name': ReconfigVM_Task, 'duration_secs': 0.744605} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.971709] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1417.972590] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f23a8c-f352-4440-be84-1a86334fd224 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.996948] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c439261f-00b9-413d-a418-43eb035e3506 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.014832] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1418.014832] env[62525]: value = "task-1781110" [ 1418.014832] env[62525]: _type = "Task" [ 1418.014832] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.022442] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781110, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.167452] env[62525]: DEBUG nova.network.neutron [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Updated VIF entry in instance network info cache for port 3dc988c5-019e-4c2d-bd0f-5e15f1e00e11. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1418.167452] env[62525]: DEBUG nova.network.neutron [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Updating instance_info_cache with network_info: [{"id": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "address": "fa:16:3e:39:b3:2b", "network": {"id": "ea70d7a5-7017-4596-b532-b0b9e7f66a64", "bridge": "br-int", "label": "tempest-ServersTestJSON-480029300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d90844079ec456b8ea730ac1c348b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc988c5-01", "ovs_interfaceid": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.171471] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.175221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.831s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.175470] env[62525]: DEBUG nova.objects.instance [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lazy-loading 'resources' on Instance uuid 9dfb7d7f-6656-46fd-969e-c692db1ce507 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1418.198226] env[62525]: INFO nova.scheduler.client.report [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleted allocations for instance 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6 [ 1418.255933] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781109, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.524911] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781110, 'name': ReconfigVM_Task, 'duration_secs': 0.314301} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.525231] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1418.525532] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c654097f-36ff-4db3-9a27-7bec56103ffb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.531777] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1418.531777] env[62525]: value = "task-1781111" [ 1418.531777] env[62525]: _type = "Task" [ 1418.531777] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.539476] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781111, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.667761] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8efc9db-1fcf-47c6-926c-712fd5c9f690 req-fa6a506b-af69-498c-834f-4a04f0e455ca service nova] Releasing lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.707969] env[62525]: DEBUG oslo_concurrency.lockutils [None req-24af4c32-8b2a-41f8-ab3f-594dff5fbb65 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "0e8254af-403d-4f5d-ac58-f3b4efc0c3d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.023s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.758561] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781109, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.041749] env[62525]: DEBUG oslo_vmware.api [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781111, 'name': PowerOnVM_Task, 'duration_secs': 0.484234} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.044266] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.046807] env[62525]: DEBUG nova.compute.manager [None req-a106fb97-d01f-4f0f-97a8-36b7c859d310 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1419.048939] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb899441-8c9d-42b8-80bf-f329044abe39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.144731] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ac943e-3d2d-4693-8464-c723b6878be9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.153058] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e26342-fe68-41e2-a12c-b870d3e5ccb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.186532] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006d3688-1553-440a-9c04-87839c18d8d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.194703] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c415c5-2328-4740-b926-805bb36c44e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.208897] env[62525]: DEBUG nova.compute.provider_tree [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1419.257910] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781109, 'name': CreateVM_Task, 'duration_secs': 1.667715} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.258130] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.258945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.259107] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.259479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1419.259733] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9cf5b7e-ca98-432f-a553-f06625548c31 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.264333] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1419.264333] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523b0d6a-637e-b2f4-15f6-9ca374334bba" [ 1419.264333] env[62525]: _type = "Task" [ 1419.264333] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.267465] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.267926] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.268023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "98334a1b-1a73-408f-93a4-6dc72764ebfc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.268188] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.268364] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.270516] env[62525]: INFO nova.compute.manager [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Terminating instance [ 1419.274993] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b0d6a-637e-b2f4-15f6-9ca374334bba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.275411] env[62525]: DEBUG nova.compute.manager [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1419.275678] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1419.276351] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107f6d51-4cd5-4092-b56d-885fc859a7c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.283121] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.283355] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6abe1ab2-3288-46e4-b189-1d287b3bca5b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.290314] env[62525]: DEBUG oslo_vmware.api [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1419.290314] env[62525]: value = "task-1781112" [ 1419.290314] env[62525]: _type = "Task" [ 1419.290314] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.298250] env[62525]: DEBUG oslo_vmware.api [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781112, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.712654] env[62525]: DEBUG nova.scheduler.client.report [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1419.775253] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b0d6a-637e-b2f4-15f6-9ca374334bba, 'name': SearchDatastore_Task, 'duration_secs': 0.017007} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.775583] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.775815] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1419.776055] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.776239] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.776431] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1419.776690] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f474fe9-3c49-4a16-bc56-ba04da89460c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.801776] env[62525]: DEBUG oslo_vmware.api [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781112, 'name': PowerOffVM_Task, 'duration_secs': 0.268549} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.802021] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1419.802198] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1419.802439] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a361d274-97fc-472b-aa1f-af11356d5b5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.810989] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1419.810989] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1419.811437] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-733ba114-a8c3-429e-94d3-20cadb65fb4a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.816621] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1419.816621] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528ae327-d1d2-32e8-333d-6984c75093d6" [ 1419.816621] env[62525]: _type = "Task" [ 1419.816621] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.825890] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528ae327-d1d2-32e8-333d-6984c75093d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.942287] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1419.942287] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1419.942287] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleting the datastore file [datastore1] 98334a1b-1a73-408f-93a4-6dc72764ebfc {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.942287] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85ce6839-97a7-4762-8639-3d1422c026e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.944027] env[62525]: DEBUG oslo_vmware.api [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for the task: (returnval){ [ 1419.944027] env[62525]: value = "task-1781114" [ 1419.944027] env[62525]: _type = "Task" [ 1419.944027] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.952391] env[62525]: DEBUG oslo_vmware.api [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.218674] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.221403] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.481s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.223017] env[62525]: INFO nova.compute.claims [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1420.246251] env[62525]: INFO nova.scheduler.client.report [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleted allocations for instance 9dfb7d7f-6656-46fd-969e-c692db1ce507 [ 1420.328857] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528ae327-d1d2-32e8-333d-6984c75093d6, 'name': SearchDatastore_Task, 'duration_secs': 0.014507} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.330103] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-597979cf-6aa1-4f88-92a8-11bee3e96ff5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.335969] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1420.335969] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529288c7-1bca-774c-fd13-91f45c2c11ad" [ 1420.335969] env[62525]: _type = "Task" [ 1420.335969] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.344469] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529288c7-1bca-774c-fd13-91f45c2c11ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.455850] env[62525]: DEBUG oslo_vmware.api [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781114, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.757563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7b95f35d-2325-48e8-969b-2c6e329a58a6 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "9dfb7d7f-6656-46fd-969e-c692db1ce507" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.438s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.850768] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529288c7-1bca-774c-fd13-91f45c2c11ad, 'name': SearchDatastore_Task, 'duration_secs': 0.010156} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.851107] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.851957] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1f3792c0-9f86-4d76-a1a6-28d492869046/1f3792c0-9f86-4d76-a1a6-28d492869046.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1420.851957] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0335905-7fc9-416e-8bf4-2968dbad28f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.860112] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1420.860112] env[62525]: value = "task-1781115" [ 1420.860112] env[62525]: _type = "Task" [ 1420.860112] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.868995] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781115, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.956847] env[62525]: DEBUG oslo_vmware.api [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Task: {'id': task-1781114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.563624} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.959695] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1420.959695] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1420.959695] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1420.959695] env[62525]: INFO nova.compute.manager [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1420.959695] env[62525]: DEBUG oslo.service.loopingcall [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1420.960170] env[62525]: DEBUG nova.compute.manager [-] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1420.960170] env[62525]: DEBUG nova.network.neutron [-] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1421.370895] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781115, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.481070] env[62525]: DEBUG nova.compute.manager [req-52fcefea-be5e-430f-b542-5c9cad7fec57 req-c8dbb6cb-971a-4627-b2d6-ed4b1744d70b service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Received event network-vif-deleted-ce676e95-1fd5-4abf-9228-aa35cc8606e6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1421.481410] env[62525]: INFO nova.compute.manager [req-52fcefea-be5e-430f-b542-5c9cad7fec57 req-c8dbb6cb-971a-4627-b2d6-ed4b1744d70b service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Neutron deleted interface ce676e95-1fd5-4abf-9228-aa35cc8606e6; detaching it from the instance and deleting it from the info cache [ 1421.481528] env[62525]: DEBUG nova.network.neutron [req-52fcefea-be5e-430f-b542-5c9cad7fec57 req-c8dbb6cb-971a-4627-b2d6-ed4b1744d70b service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.702010] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd3ffb8-d7f2-47c6-a46a-ac1424e604c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.710760] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e83b615-2754-4d6d-9661-2885f54c2ee6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.748565] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bba0d8-d79a-42b6-8ff6-50c02bd367ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.757469] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43925c8e-e021-46cf-bdc6-421e3add9d54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.775415] env[62525]: DEBUG nova.compute.provider_tree [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1421.804229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Acquiring lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.804507] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1421.804719] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Acquiring lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.804899] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.805076] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.807831] env[62525]: INFO nova.compute.manager [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Terminating instance [ 1421.809848] env[62525]: DEBUG nova.network.neutron [-] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.811241] env[62525]: DEBUG nova.compute.manager [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1421.811456] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1421.811684] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-179034ba-f2ea-47d2-be9d-4ba8ec175724 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.819497] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1421.819497] env[62525]: value = "task-1781116" [ 1421.819497] env[62525]: _type = "Task" [ 1421.819497] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.830306] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781116, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.873568] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781115, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634364} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.873811] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1f3792c0-9f86-4d76-a1a6-28d492869046/1f3792c0-9f86-4d76-a1a6-28d492869046.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1421.874029] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1421.874285] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e383cc6d-6fc3-4456-b6b6-430c53c1719e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.881087] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1421.881087] env[62525]: value = "task-1781117" [ 1421.881087] env[62525]: _type = "Task" [ 1421.881087] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.890335] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781117, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.983993] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd3b30c9-30d9-4e4d-a3f1-f7556065c1bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.993233] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42af893c-1e4e-49e2-be4e-d56d8a90ce0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.027370] env[62525]: DEBUG nova.compute.manager [req-52fcefea-be5e-430f-b542-5c9cad7fec57 req-c8dbb6cb-971a-4627-b2d6-ed4b1744d70b service nova] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Detach interface failed, port_id=ce676e95-1fd5-4abf-9228-aa35cc8606e6, reason: Instance 98334a1b-1a73-408f-93a4-6dc72764ebfc could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1422.278473] env[62525]: DEBUG nova.scheduler.client.report [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1422.314658] env[62525]: INFO nova.compute.manager [-] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Took 1.36 seconds to deallocate network for instance. [ 1422.331749] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781116, 'name': PowerOffVM_Task, 'duration_secs': 0.230306} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.331978] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1422.332244] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1422.332472] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369595', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'name': 'volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'serial': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1422.333944] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dff89de-6e3a-410d-8dec-e47cc245c6b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.354681] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bb705d-623c-47a7-b8b4-e609f026fefc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.362046] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b1c7da-6b56-4c37-bc6a-8b3ef1d6d609 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.380472] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def22813-be34-40e1-8f3f-edf7a7312520 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.404598] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] The volume has not been displaced from its original location: [datastore1] volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb/volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb.vmdk. No consolidation needed. {{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1422.409880] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Reconfiguring VM instance instance-00000020 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1422.410228] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781117, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15803} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.410444] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03805002-c61b-4ba2-9c9c-0c7b93671c19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.423251] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1422.424060] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ec1017-b136-4e23-ace7-8359b1cc9c92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.447750] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 1f3792c0-9f86-4d76-a1a6-28d492869046/1f3792c0-9f86-4d76-a1a6-28d492869046.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1422.449223] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08f6e361-9037-4b83-97db-fc110e1b0476 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.465185] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1422.465185] env[62525]: value = "task-1781118" [ 1422.465185] env[62525]: _type = "Task" [ 1422.465185] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.472449] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1422.472449] env[62525]: value = "task-1781119" [ 1422.472449] env[62525]: _type = "Task" [ 1422.472449] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.480841] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.487922] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781119, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.624626] env[62525]: INFO nova.compute.manager [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Rescuing [ 1422.624626] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.624806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.624955] env[62525]: DEBUG nova.network.neutron [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1422.643335] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "70313696-a9cc-499c-b9e6-329a71c4b915" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.643559] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "70313696-a9cc-499c-b9e6-329a71c4b915" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.783460] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.783986] env[62525]: DEBUG nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1422.786812] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.189s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.788393] env[62525]: INFO nova.compute.claims [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1422.823295] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.980554] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781118, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.988775] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781119, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.294199] env[62525]: DEBUG nova.compute.utils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1423.299507] env[62525]: DEBUG nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1423.299507] env[62525]: DEBUG nova.network.neutron [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1423.362467] env[62525]: DEBUG nova.policy [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffe69151c9b24ac3a4565ede52508534', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f146b0ac3df644fe9b0b9b9bef48c34d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1423.450859] env[62525]: DEBUG nova.network.neutron [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updating instance_info_cache with network_info: [{"id": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "address": "fa:16:3e:5e:6d:f5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0c2886-9f", "ovs_interfaceid": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.485778] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781118, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.496315] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781119, 'name': ReconfigVM_Task, 'duration_secs': 0.966128} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.497033] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 1f3792c0-9f86-4d76-a1a6-28d492869046/1f3792c0-9f86-4d76-a1a6-28d492869046.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1423.497739] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d6c8828-c54d-41f6-af31-bfa2ce058123 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.504475] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1423.504475] env[62525]: value = "task-1781120" [ 1423.504475] env[62525]: _type = "Task" [ 1423.504475] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.515294] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781120, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.768826] env[62525]: DEBUG nova.network.neutron [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Successfully created port: c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1423.800110] env[62525]: DEBUG nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1423.953665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.984910] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781118, 'name': ReconfigVM_Task, 'duration_secs': 1.177791} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.985614] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Reconfigured VM instance instance-00000020 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1423.990566] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c94c50e-f66f-4511-ac97-9ce4f8e0e790 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.012225] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1424.012225] env[62525]: value = "task-1781121" [ 1424.012225] env[62525]: _type = "Task" [ 1424.012225] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.020261] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781120, 'name': Rename_Task, 'duration_secs': 0.240502} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.023074] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1424.024019] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3df89e62-6be2-4b64-8c46-23e4279c88b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.029286] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.034079] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1424.034079] env[62525]: value = "task-1781122" [ 1424.034079] env[62525]: _type = "Task" [ 1424.034079] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.045980] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781122, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.273950] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f665c05-d520-4c94-90f5-778055729be6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.282299] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6d1a1e-ce58-4509-a1f6-0d5c19dc983c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.319076] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6984c11f-7d2c-4819-9af3-e1ad12a32eed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.326888] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b1b547-9e18-4cfb-8226-4a9a327b1920 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.341809] env[62525]: DEBUG nova.compute.provider_tree [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1424.510427] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1424.510709] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-110a907c-0dec-4e9f-9066-b9679a97dfd6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.517814] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1424.517814] env[62525]: value = "task-1781123" [ 1424.517814] env[62525]: _type = "Task" [ 1424.517814] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.522025] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781121, 'name': ReconfigVM_Task, 'duration_secs': 0.137735} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.524292] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369595', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'name': 'volume-ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb', 'serial': 'ea89af57-65c0-4ea1-9faf-5585daa59ceb'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1424.524569] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1424.525412] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aa81a3-b34d-431b-a001-7b4ed0642d70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.532425] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.534784] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1424.537877] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19b55d2b-90b6-4c21-bb77-b391fe8563f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.544998] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781122, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.641949] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1424.642241] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1424.642525] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Deleting the datastore file [datastore1] cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.643094] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51ddab29-0507-4ab0-b361-97234565ee3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.649930] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for the task: (returnval){ [ 1424.649930] env[62525]: value = "task-1781125" [ 1424.649930] env[62525]: _type = "Task" [ 1424.649930] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.658947] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781125, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.825838] env[62525]: DEBUG nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1424.846703] env[62525]: DEBUG nova.scheduler.client.report [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1424.857827] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1424.858116] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1424.858375] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1424.858589] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1424.858737] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1424.858889] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1424.859138] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1424.859336] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1424.859506] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1424.860310] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1424.860310] env[62525]: DEBUG nova.virt.hardware [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1424.860831] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad1024f-70c6-4881-bb9d-9c638e8c2c1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.870268] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2df93b-8302-4afe-bfcc-3e8349515e0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.031404] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781123, 'name': PowerOffVM_Task, 'duration_secs': 0.27902} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.031820] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.032617] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508fbb06-a103-4037-8e52-44a02255f299 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.056589] env[62525]: DEBUG oslo_vmware.api [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781122, 'name': PowerOnVM_Task, 'duration_secs': 0.781536} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.057287] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a330b61e-58e6-41d0-884d-ffe84f913538 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.059823] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1425.060039] env[62525]: INFO nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Took 11.11 seconds to spawn the instance on the hypervisor. 
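The entries above trace a complete VMware spawn: the cached image is copied to the instance's datastore directory, the root disk is extended, the VM is reconfigured to attach the disk, renamed, and finally powered on, with each vCenter operation issued as an asynchronous task that oslo.vmware polls to completion (the repeated "Waiting for the task" / "progress is N%" lines). Below is a minimal sketch of that task pattern using oslo.vmware directly rather than Nova's wrappers; the host, credentials, and instance UUID are placeholders, and constructor argument names may differ slightly between oslo.vmware releases.

# Sketch only, not Nova's actual code: start an asynchronous vCenter task and
# wait for it, the pattern behind the wait_for_task/_poll_task records above.
from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; task_poll_interval controls how often
# the "progress is N%" polling entries would be emitted.
session = vmware_api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Look up the VM by instance UUID, as the SearchIndex.FindAllByUuid calls in
# the log do. The UUID here is taken from the spawned instance above.
vms = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='1f3792c0-9f86-4d76-a1a6-28d492869046',
    vmSearch=True, instanceUuid=True)
vm_ref = vms[0]

# Kick off the asynchronous task (PowerOnVM_Task, as in task-1781122 above),
# then block on it; wait_for_task polls the task object and raises if the
# task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)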
[ 1425.060217] env[62525]: DEBUG nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1425.060901] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667a794c-3ed1-4dca-85de-5581f047a4d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.765411] env[62525]: DEBUG nova.network.neutron [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Successfully updated port: c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1425.767729] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.981s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.768265] env[62525]: DEBUG nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1425.771427] env[62525]: DEBUG nova.compute.manager [req-f5d0f722-b84e-4b0c-bccf-3880a19539b9 req-617d9df2-2c2d-4da9-9ba1-e2f0bbade6fe service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Received event network-vif-plugged-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1425.771624] env[62525]: DEBUG oslo_concurrency.lockutils [req-f5d0f722-b84e-4b0c-bccf-3880a19539b9 req-617d9df2-2c2d-4da9-9ba1-e2f0bbade6fe service nova] Acquiring lock "e8864d73-35e6-490b-a07c-e8cac8baf880-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.771823] env[62525]: DEBUG oslo_concurrency.lockutils [req-f5d0f722-b84e-4b0c-bccf-3880a19539b9 req-617d9df2-2c2d-4da9-9ba1-e2f0bbade6fe service nova] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.771983] env[62525]: DEBUG oslo_concurrency.lockutils [req-f5d0f722-b84e-4b0c-bccf-3880a19539b9 req-617d9df2-2c2d-4da9-9ba1-e2f0bbade6fe service nova] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.772157] env[62525]: DEBUG nova.compute.manager [req-f5d0f722-b84e-4b0c-bccf-3880a19539b9 req-617d9df2-2c2d-4da9-9ba1-e2f0bbade6fe service nova] [instance: 
e8864d73-35e6-490b-a07c-e8cac8baf880] No waiting events found dispatching network-vif-plugged-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1425.772316] env[62525]: WARNING nova.compute.manager [req-f5d0f722-b84e-4b0c-bccf-3880a19539b9 req-617d9df2-2c2d-4da9-9ba1-e2f0bbade6fe service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Received unexpected event network-vif-plugged-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca for instance with vm_state building and task_state spawning. [ 1425.778835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.328s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.779080] env[62525]: DEBUG nova.objects.instance [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lazy-loading 'resources' on Instance uuid 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1425.792846] env[62525]: DEBUG oslo_vmware.api [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Task: {'id': task-1781125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107669} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.793105] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1425.793285] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1425.793455] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1425.793615] env[62525]: INFO nova.compute.manager [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Took 3.98 seconds to destroy the instance on the hypervisor. [ 1425.793838] env[62525]: DEBUG oslo.service.loopingcall [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.794245] env[62525]: DEBUG nova.compute.manager [-] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1425.794345] env[62525]: DEBUG nova.network.neutron [-] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.803673] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1425.804219] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bec5dd45-7698-4ffa-a4ab-5505ac8ef259 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.811691] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1425.811691] env[62525]: value = "task-1781126" [ 1425.811691] env[62525]: _type = "Task" [ 1425.811691] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.822541] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1425.822812] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1425.823067] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.823217] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.823443] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 
tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.823715] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d741bef8-0d18-4702-ba57-6f2f097a8ed4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.836880] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.837179] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1425.841172] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27c8be18-99bf-493a-a70f-1c884d425812 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.846699] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1425.846699] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a87b82-2a06-d39a-8d3f-c48a146ce33d" [ 1425.846699] env[62525]: _type = "Task" [ 1425.846699] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.853865] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a87b82-2a06-d39a-8d3f-c48a146ce33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.276102] env[62525]: DEBUG nova.compute.utils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1426.276940] env[62525]: DEBUG nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1426.277240] env[62525]: DEBUG nova.network.neutron [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1426.279505] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.279661] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquired lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.279832] env[62525]: DEBUG nova.network.neutron [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1426.298210] env[62525]: INFO nova.compute.manager [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Took 45.52 seconds to build instance. [ 1426.356839] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a87b82-2a06-d39a-8d3f-c48a146ce33d, 'name': SearchDatastore_Task, 'duration_secs': 0.020291} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.360468] env[62525]: DEBUG nova.policy [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef8d66aa33c2442ea266c3b687ba2d6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77eae31161444518aadfe27dd51c2081', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1426.362789] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16b88a87-2b5f-4796-8d81-124eae6a7e59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.372038] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1426.372038] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d56479-ed55-e669-0de7-4b21e3b4ac6f" [ 1426.372038] env[62525]: _type = "Task" [ 1426.372038] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.378293] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d56479-ed55-e669-0de7-4b21e3b4ac6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.686568] env[62525]: DEBUG nova.network.neutron [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Successfully created port: 151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1426.767752] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a360a43f-8ed9-4177-a161-2cd581ab8236 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.777980] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88a4f14-6c0e-4627-8c18-456edcf7be2f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.781990] env[62525]: DEBUG nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1426.816391] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a0a57d34-b35b-413c-bcf7-2add6407ef27 tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.492s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.819214] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f041add9-64cf-48af-9e3f-66ac39df089f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.827583] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6913c0-ee42-4502-ad7f-5ac68cd7010b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.835019] env[62525]: DEBUG nova.network.neutron [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1426.845419] env[62525]: DEBUG nova.compute.provider_tree [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.849562] env[62525]: DEBUG nova.network.neutron [-] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.882876] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d56479-ed55-e669-0de7-4b21e3b4ac6f, 'name': SearchDatastore_Task, 'duration_secs': 0.01009} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.882876] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.882876] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. 
{{(pid=62525) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1426.882876] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d051ea91-73f9-427f-8396-5c2ef0156ac0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.889545] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1426.889545] env[62525]: value = "task-1781127" [ 1426.889545] env[62525]: _type = "Task" [ 1426.889545] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.898432] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.003580] env[62525]: DEBUG nova.network.neutron [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updating instance_info_cache with network_info: [{"id": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "address": "fa:16:3e:d9:21:74", "network": {"id": "26390d2b-b8a0-4c73-ba85-09aba5f224a6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1630990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f146b0ac3df644fe9b0b9b9bef48c34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9bb49cb-09", "ovs_interfaceid": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.260405] env[62525]: DEBUG nova.compute.manager [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Received event network-changed-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1427.260797] env[62525]: DEBUG nova.compute.manager [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Refreshing instance network info cache due to event network-changed-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1427.261141] env[62525]: DEBUG oslo_concurrency.lockutils [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] Acquiring lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.326019] env[62525]: DEBUG nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1427.348835] env[62525]: DEBUG nova.scheduler.client.report [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1427.352948] env[62525]: INFO nova.compute.manager [-] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Took 1.56 seconds to deallocate network for instance. [ 1427.401626] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781127, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.509592] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Releasing lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.509592] env[62525]: DEBUG nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Instance network_info: |[{"id": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "address": "fa:16:3e:d9:21:74", "network": {"id": "26390d2b-b8a0-4c73-ba85-09aba5f224a6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1630990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f146b0ac3df644fe9b0b9b9bef48c34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9bb49cb-09", "ovs_interfaceid": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1427.509835] env[62525]: DEBUG oslo_concurrency.lockutils [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] Acquired lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.509835] env[62525]: DEBUG nova.network.neutron [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Refreshing network info cache for port c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1427.509835] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:21:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1427.521100] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 
tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Creating folder: Project (f146b0ac3df644fe9b0b9b9bef48c34d). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1427.524248] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bccf8e9-b48f-4a53-a65c-9f198b0529d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.536990] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Created folder: Project (f146b0ac3df644fe9b0b9b9bef48c34d) in parent group-v369553. [ 1427.536990] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Creating folder: Instances. Parent ref: group-v369670. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1427.537511] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ab9bf9b-4d9b-4862-a6d7-1407ad2b6949 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.551451] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Created folder: Instances in parent group-v369670. [ 1427.551708] env[62525]: DEBUG oslo.service.loopingcall [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1427.551913] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1427.552223] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9711e95f-0672-4b3c-83e0-7578c9279e12 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.575265] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1427.575265] env[62525]: value = "task-1781130" [ 1427.575265] env[62525]: _type = "Task" [ 1427.575265] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.584897] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781130, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.795341] env[62525]: DEBUG nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1427.826811] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1427.827091] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1427.827263] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1427.827633] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1427.827633] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1427.827772] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1427.828018] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1427.828108] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1427.828339] env[62525]: DEBUG nova.virt.hardware [None 
req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1427.828520] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1427.828690] env[62525]: DEBUG nova.virt.hardware [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1427.829640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e344d164-9404-49d0-bb36-d52d007c7e06 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.843290] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dba0ee-ce75-427e-9ee0-382ede53d9ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.858520] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.080s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.861678] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.861981] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.810s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.862207] env[62525]: DEBUG nova.objects.instance [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lazy-loading 'resources' on Instance uuid cfae9bf8-012a-4286-b978-bba8a913bba2 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.898915] env[62525]: INFO nova.scheduler.client.report [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Deleted allocations for instance 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6 [ 1427.911859] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca 
tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.824856} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.912346] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. [ 1427.913558] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb38101-ce5a-4ba9-99d6-29e12ee8393d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.942093] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.943704] env[62525]: INFO nova.compute.manager [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Took 0.59 seconds to detach 1 volumes for instance. [ 1427.948211] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5696157-8be1-4092-9cac-c4863f6588fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.962598] env[62525]: DEBUG nova.compute.manager [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Deleting volume: ea89af57-65c0-4ea1-9faf-5585daa59ceb {{(pid=62525) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1427.972619] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1427.972619] env[62525]: value = "task-1781131" [ 1427.972619] env[62525]: _type = "Task" [ 1427.972619] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.983791] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781131, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.086709] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781130, 'name': CreateVM_Task, 'duration_secs': 0.355933} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.086924] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1428.087609] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.087777] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.088122] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1428.088414] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2e16a23-a223-494f-9b2b-c66d6349d811 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.093118] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1428.093118] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528c90a1-ab10-8491-c71f-938957932815" [ 1428.093118] env[62525]: _type = "Task" [ 1428.093118] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.101099] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528c90a1-ab10-8491-c71f-938957932815, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.261189] env[62525]: DEBUG nova.network.neutron [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updated VIF entry in instance network info cache for port c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1428.261529] env[62525]: DEBUG nova.network.neutron [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updating instance_info_cache with network_info: [{"id": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "address": "fa:16:3e:d9:21:74", "network": {"id": "26390d2b-b8a0-4c73-ba85-09aba5f224a6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1630990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f146b0ac3df644fe9b0b9b9bef48c34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9bb49cb-09", "ovs_interfaceid": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.415129] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0475a4-0813-41a8-99da-bf088bd7db6a tempest-ServersAdminNegativeTestJSON-298559599 tempest-ServersAdminNegativeTestJSON-298559599-project-member] Lock "9a7bfafe-8598-4c6f-9714-0567fcbb8ea6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.946s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.483286] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781131, 'name': ReconfigVM_Task, 'duration_secs': 0.337855} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.483585] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Reconfigured VM instance instance-00000024 to attach disk [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1428.484436] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fa8cf0-fee7-4476-bb38-681043ccc7bf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.515080] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.515346] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88c4a69d-5fe3-43b2-b88b-fead2eb39f25 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.531490] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1428.531490] env[62525]: value = "task-1781133" [ 1428.531490] env[62525]: _type = "Task" [ 1428.531490] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.542590] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781133, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.549249] env[62525]: DEBUG nova.compute.manager [req-1e5efef0-5362-4bea-95a8-505c1fb7aac2 req-2ec2bae5-255b-493f-ba7d-835e3e45a600 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Received event network-vif-plugged-151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1428.550069] env[62525]: DEBUG oslo_concurrency.lockutils [req-1e5efef0-5362-4bea-95a8-505c1fb7aac2 req-2ec2bae5-255b-493f-ba7d-835e3e45a600 service nova] Acquiring lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.550069] env[62525]: DEBUG oslo_concurrency.lockutils [req-1e5efef0-5362-4bea-95a8-505c1fb7aac2 req-2ec2bae5-255b-493f-ba7d-835e3e45a600 service nova] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.550069] env[62525]: DEBUG oslo_concurrency.lockutils [req-1e5efef0-5362-4bea-95a8-505c1fb7aac2 req-2ec2bae5-255b-493f-ba7d-835e3e45a600 service nova] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.550069] env[62525]: DEBUG nova.compute.manager [req-1e5efef0-5362-4bea-95a8-505c1fb7aac2 req-2ec2bae5-255b-493f-ba7d-835e3e45a600 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] No waiting events found dispatching network-vif-plugged-151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1428.550980] env[62525]: WARNING nova.compute.manager [req-1e5efef0-5362-4bea-95a8-505c1fb7aac2 req-2ec2bae5-255b-493f-ba7d-835e3e45a600 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Received unexpected event network-vif-plugged-151d8aa1-065a-409f-9d41-61d553ade236 for instance with vm_state building and task_state spawning. [ 1428.604022] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528c90a1-ab10-8491-c71f-938957932815, 'name': SearchDatastore_Task, 'duration_secs': 0.01702} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.609063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.609063] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1428.609063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.609063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.609684] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1428.609684] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1c83a12-5abe-45b8-9696-e4237894aeba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.610068] env[62525]: DEBUG nova.network.neutron [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Successfully updated port: 151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1428.618704] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1428.618933] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1428.619907] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bb377fe-8742-4f2d-b3b6-2b022c5bdcdd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.629856] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1428.629856] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526a9a5a-9115-d5a7-a8f1-cd4a603f2a5b" [ 1428.629856] env[62525]: _type = "Task" [ 1428.629856] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.641321] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526a9a5a-9115-d5a7-a8f1-cd4a603f2a5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.767989] env[62525]: DEBUG oslo_concurrency.lockutils [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] Releasing lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.768491] env[62525]: DEBUG nova.compute.manager [req-38a77ef3-f37a-4cd9-96e9-a8551abdcba2 req-acc63385-3ebe-479b-867f-49bce1e55d21 service nova] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Received event network-vif-deleted-28c8b4db-8c62-4e51-a573-d0e05371bbd6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1428.827351] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cfe3f5-179d-4af1-aae3-b54463707846 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.835051] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6089202c-5ce1-4f3c-8bc4-76a75af9fb06 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.865059] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa24c680-4fda-44d5-8cca-a4da44ee8144 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.871821] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847e65eb-815d-42ba-9747-5d97e3b4b251 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.884390] env[62525]: DEBUG nova.compute.provider_tree [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1429.041233] env[62525]: 
DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781133, 'name': ReconfigVM_Task, 'duration_secs': 0.152983} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.041518] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1429.041787] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb673420-d63b-43f7-9259-cb08ba5f061e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.049287] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1429.049287] env[62525]: value = "task-1781134" [ 1429.049287] env[62525]: _type = "Task" [ 1429.049287] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.057062] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781134, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.113413] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.113599] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.113771] env[62525]: DEBUG nova.network.neutron [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1429.140170] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526a9a5a-9115-d5a7-a8f1-cd4a603f2a5b, 'name': SearchDatastore_Task, 'duration_secs': 0.01644} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.140968] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8233461a-4bc1-4215-86c2-ae51f56bb56f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.147319] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1429.147319] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cd4e96-a0d7-6701-468c-f5877fefa328" [ 1429.147319] env[62525]: _type = "Task" [ 1429.147319] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.156023] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cd4e96-a0d7-6701-468c-f5877fefa328, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.287903] env[62525]: DEBUG nova.compute.manager [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Received event network-changed-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1429.288099] env[62525]: DEBUG nova.compute.manager [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Refreshing instance network info cache due to event network-changed-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1429.288299] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] Acquiring lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.288470] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] Acquired lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.288645] env[62525]: DEBUG nova.network.neutron [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Refreshing network info cache for port 3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1429.388537] env[62525]: DEBUG nova.scheduler.client.report [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1429.560277] env[62525]: DEBUG oslo_vmware.api [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781134, 'name': PowerOnVM_Task, 'duration_secs': 0.394378} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.560701] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.564408] env[62525]: DEBUG nova.compute.manager [None req-e66d71f7-d381-4f74-b233-c0dc525afdca tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1429.565232] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cae85b-6c01-48f4-a07b-5f8635da7cd9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.651957] env[62525]: DEBUG nova.network.neutron [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1429.659865] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cd4e96-a0d7-6701-468c-f5877fefa328, 'name': SearchDatastore_Task, 'duration_secs': 0.014002} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.660140] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.660399] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e8864d73-35e6-490b-a07c-e8cac8baf880/e8864d73-35e6-490b-a07c-e8cac8baf880.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1429.660655] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e721f823-616a-4919-8920-9e79bd3a08f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.666842] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1429.666842] env[62525]: value = "task-1781135" [ 1429.666842] env[62525]: _type = "Task" [ 1429.666842] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.677040] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781135, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.811665] env[62525]: DEBUG nova.network.neutron [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Updating instance_info_cache with network_info: [{"id": "151d8aa1-065a-409f-9d41-61d553ade236", "address": "fa:16:3e:e1:36:e6", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap151d8aa1-06", "ovs_interfaceid": "151d8aa1-065a-409f-9d41-61d553ade236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.894882] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.033s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.899339] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.964s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.899339] env[62525]: INFO nova.compute.claims [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1429.925463] env[62525]: INFO nova.scheduler.client.report [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Deleted allocations for instance cfae9bf8-012a-4286-b978-bba8a913bba2 [ 1430.125325] env[62525]: DEBUG nova.network.neutron [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Updated VIF entry in instance network info cache for port 3dc988c5-019e-4c2d-bd0f-5e15f1e00e11. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1430.125692] env[62525]: DEBUG nova.network.neutron [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Updating instance_info_cache with network_info: [{"id": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "address": "fa:16:3e:39:b3:2b", "network": {"id": "ea70d7a5-7017-4596-b532-b0b9e7f66a64", "bridge": "br-int", "label": "tempest-ServersTestJSON-480029300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d90844079ec456b8ea730ac1c348b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc988c5-01", "ovs_interfaceid": "3dc988c5-019e-4c2d-bd0f-5e15f1e00e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.179806] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781135, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.313887] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Releasing lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.314209] env[62525]: DEBUG nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Instance network_info: |[{"id": "151d8aa1-065a-409f-9d41-61d553ade236", "address": "fa:16:3e:e1:36:e6", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap151d8aa1-06", "ovs_interfaceid": "151d8aa1-065a-409f-9d41-61d553ade236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1430.314705] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:36:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '151d8aa1-065a-409f-9d41-61d553ade236', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1430.322367] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Creating folder: Project (77eae31161444518aadfe27dd51c2081). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1430.323032] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe417a3e-ceb0-4117-bdcb-a5feb1ae3970 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.334424] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Created folder: Project (77eae31161444518aadfe27dd51c2081) in parent group-v369553. [ 1430.334698] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Creating folder: Instances. Parent ref: group-v369673. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1430.334842] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d50202a1-f4ec-49a2-8485-e5222da0d21a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.345025] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Created folder: Instances in parent group-v369673. [ 1430.345025] env[62525]: DEBUG oslo.service.loopingcall [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1430.345197] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1430.345386] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2a98fbd-8ff3-433d-acda-4c1b4030d555 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.374031] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1430.374031] env[62525]: value = "task-1781138" [ 1430.374031] env[62525]: _type = "Task" [ 1430.374031] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.382111] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781138, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.440793] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c77649d5-af7c-44d7-acea-270151cf1397 tempest-AttachInterfacesV270Test-893122844 tempest-AttachInterfacesV270Test-893122844-project-member] Lock "cfae9bf8-012a-4286-b978-bba8a913bba2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.672s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.629261] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c5051a3-02b5-471c-9a83-cfd7cc4a6f0c req-407af8a0-02f3-4495-83cb-ad73496fcd6d service nova] Releasing lock "refresh_cache-1f3792c0-9f86-4d76-a1a6-28d492869046" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.656722] env[62525]: DEBUG nova.compute.manager [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Received event network-changed-151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1430.656895] env[62525]: DEBUG nova.compute.manager [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Refreshing instance network info cache due to event network-changed-151d8aa1-065a-409f-9d41-61d553ade236. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1430.657119] env[62525]: DEBUG oslo_concurrency.lockutils [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] Acquiring lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.657262] env[62525]: DEBUG oslo_concurrency.lockutils [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] Acquired lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.657420] env[62525]: DEBUG nova.network.neutron [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Refreshing network info cache for port 151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1430.681917] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61141} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.682199] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e8864d73-35e6-490b-a07c-e8cac8baf880/e8864d73-35e6-490b-a07c-e8cac8baf880.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1430.682405] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1430.682686] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-253c4b56-bc08-4170-a57a-273d092efc13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.692159] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1430.692159] env[62525]: value = "task-1781139" [ 1430.692159] env[62525]: _type = "Task" [ 1430.692159] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.703509] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781139, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.884763] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781138, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.204951] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781139, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074799} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.207398] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1431.208426] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7016bb-91b1-4781-a215-6dcf81bd6070 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.239593] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] e8864d73-35e6-490b-a07c-e8cac8baf880/e8864d73-35e6-490b-a07c-e8cac8baf880.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1431.245381] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e14c8673-11f2-481f-ba28-982f4b92a752 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.293504] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1431.293504] env[62525]: value = "task-1781140" [ 1431.293504] env[62525]: _type = "Task" [ 1431.293504] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.312486] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781140, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.316571] env[62525]: INFO nova.compute.manager [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Unrescuing [ 1431.316841] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.317158] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.317724] env[62525]: DEBUG nova.network.neutron [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1431.391306] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781138, 'name': CreateVM_Task, 'duration_secs': 0.601342} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.394615] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1431.397921] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.398390] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.398681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1431.399019] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04809a68-7d83-4c75-8be0-80a6c1a4146b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.405426] env[62525]: DEBUG oslo_vmware.api [None 
req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1431.405426] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e030e8-b451-389c-8c91-b3e8968e8761" [ 1431.405426] env[62525]: _type = "Task" [ 1431.405426] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.418837] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e030e8-b451-389c-8c91-b3e8968e8761, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.477400] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a891e84-f875-4535-961b-b5ae477e6e4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.486424] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44902e12-a421-4dc5-b901-b3294ebac539 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.524713] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc7be12-0e0f-4f2d-b6f1-c2f8c35493b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.534233] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade95b60-edfe-48be-94fa-db4d7123c0c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.550409] env[62525]: DEBUG nova.compute.provider_tree [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.623685] env[62525]: DEBUG nova.network.neutron [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Updated VIF entry in instance network info cache for port 151d8aa1-065a-409f-9d41-61d553ade236. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1431.624083] env[62525]: DEBUG nova.network.neutron [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Updating instance_info_cache with network_info: [{"id": "151d8aa1-065a-409f-9d41-61d553ade236", "address": "fa:16:3e:e1:36:e6", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap151d8aa1-06", "ovs_interfaceid": "151d8aa1-065a-409f-9d41-61d553ade236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.804927] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781140, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.917297] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e030e8-b451-389c-8c91-b3e8968e8761, 'name': SearchDatastore_Task, 'duration_secs': 0.048476} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.917575] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.917815] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1431.918061] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.918201] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.918401] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.918688] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-262d8432-d84d-4c9d-a5dd-b71e0b875947 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.929218] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.929432] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1431.930242] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44d038ee-f98a-4402-8393-07c7a689dff7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.936725] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1431.936725] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ad478c-c7b1-d641-883a-e936477f1a9b" [ 1431.936725] env[62525]: _type = "Task" [ 1431.936725] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.945805] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ad478c-c7b1-d641-883a-e936477f1a9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.055776] env[62525]: DEBUG nova.scheduler.client.report [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1432.127598] env[62525]: DEBUG oslo_concurrency.lockutils [req-4bb00953-13a3-493c-9307-ef2f9517b264 req-72c32ef5-f0fc-4d6a-bf88-f24251a05c76 service nova] Releasing lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.313416] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781140, 'name': ReconfigVM_Task, 'duration_secs': 0.643188} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.313416] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Reconfigured VM instance instance-00000029 to attach disk [datastore1] e8864d73-35e6-490b-a07c-e8cac8baf880/e8864d73-35e6-490b-a07c-e8cac8baf880.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1432.318021] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bb0ba1b-2918-4a56-9369-992cf30b0e25 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.328018] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1432.328018] env[62525]: value = "task-1781141" [ 1432.328018] env[62525]: _type = "Task" [ 1432.328018] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.343500] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781141, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.427451] env[62525]: DEBUG nova.network.neutron [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updating instance_info_cache with network_info: [{"id": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "address": "fa:16:3e:5e:6d:f5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0c2886-9f", "ovs_interfaceid": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.449337] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': 
session[52912505-83d3-c6c8-239b-e663f6298abd]52ad478c-c7b1-d641-883a-e936477f1a9b, 'name': SearchDatastore_Task, 'duration_secs': 0.036309} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.450534] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52d2a74e-f13b-4285-b4ac-d57efcdc233f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.457607] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1432.457607] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cdfa03-3069-2fcc-58b9-bc29fd614276" [ 1432.457607] env[62525]: _type = "Task" [ 1432.457607] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.470941] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cdfa03-3069-2fcc-58b9-bc29fd614276, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.562021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.562021] env[62525]: DEBUG nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1432.563203] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.905s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.565550] env[62525]: INFO nova.compute.claims [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1432.840978] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781141, 'name': Rename_Task, 'duration_secs': 0.315752} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.840978] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1432.843201] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b6f37bf-1413-4871-80e3-97a7e7bfe191 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.852466] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1432.852466] env[62525]: value = "task-1781142" [ 1432.852466] env[62525]: _type = "Task" [ 1432.852466] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.862266] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781142, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.930310] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.931097] env[62525]: DEBUG nova.objects.instance [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lazy-loading 'flavor' on Instance uuid fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1432.973509] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cdfa03-3069-2fcc-58b9-bc29fd614276, 'name': SearchDatastore_Task, 'duration_secs': 0.013157} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.973894] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.973894] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d/29aaac3b-1f0e-40fe-9805-a0e6e6ae597d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1432.974209] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4c1db5a-ac32-40ce-b244-e9e01a3b25b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.982421] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1432.982421] env[62525]: value = "task-1781143" [ 1432.982421] env[62525]: _type = "Task" [ 1432.982421] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.993095] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.072595] env[62525]: DEBUG nova.compute.utils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1433.077711] env[62525]: DEBUG nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Not allocating networking since 'none' was specified. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1433.367715] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781142, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.439026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44c3f15-c05d-4978-b560-a647e6194a13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.465891] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1433.466717] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4258bb53-b052-4fc3-8009-0ddf416b512b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.479377] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1433.479377] env[62525]: value = "task-1781144" [ 1433.479377] env[62525]: _type = "Task" [ 1433.479377] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.488813] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781144, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.501035] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781143, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.577756] env[62525]: DEBUG nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1433.864727] env[62525]: DEBUG oslo_vmware.api [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781142, 'name': PowerOnVM_Task, 'duration_secs': 0.539947} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.865116] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1433.865375] env[62525]: INFO nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Took 9.04 seconds to spawn the instance on the hypervisor. [ 1433.865608] env[62525]: DEBUG nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1433.866855] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eb7bcd-b0cc-4e89-85e7-ea7cd11cffe9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.990304] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781144, 'name': PowerOffVM_Task, 'duration_secs': 0.2364} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.995479] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1434.000745] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Reconfiguring VM instance instance-00000024 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1434.001505] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b51a06-5b54-46e5-9151-261eec0ecdc9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.023202] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518965} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.024523] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d/29aaac3b-1f0e-40fe-9805-a0e6e6ae597d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1434.024749] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1434.025083] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1434.025083] env[62525]: value = "task-1781145" [ 1434.025083] env[62525]: _type = "Task" [ 1434.025083] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.025283] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b26c6d09-a8a1-4de6-8866-633a5975413d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.039354] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781145, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.040717] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1434.040717] env[62525]: value = "task-1781146" [ 1434.040717] env[62525]: _type = "Task" [ 1434.040717] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.059241] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781146, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.275364] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37881ce2-8391-4aab-8cdb-005890e5c17e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.285556] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499ebe80-2805-4fcc-ad06-e707f02e509c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.320287] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3082a1-da83-4a9d-940a-6aa29764d5de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.329390] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f38c651-01aa-4039-ab10-f0fa5dacde4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.344599] env[62525]: DEBUG nova.compute.provider_tree [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1434.386583] env[62525]: INFO nova.compute.manager [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Took 50.67 seconds to build instance. [ 1434.539491] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781145, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.550426] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.305809} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.550738] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1434.551542] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580c32a2-203f-4455-aaf8-a40b820cbe81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.574601] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d/29aaac3b-1f0e-40fe-9805-a0e6e6ae597d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1434.574918] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-660ba880-1b03-4eaa-a45e-71e9b368c70b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.590796] env[62525]: DEBUG nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1434.600311] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1434.600311] env[62525]: value = "task-1781147" [ 1434.600311] env[62525]: _type = "Task" [ 1434.600311] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.609868] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781147, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.626040] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1434.626440] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1434.626654] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1434.626654] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1434.626804] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1434.627755] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1434.627755] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1434.627755] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1434.627930] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 
tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1434.627977] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1434.628470] env[62525]: DEBUG nova.virt.hardware [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1434.629134] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18548c80-742d-44e6-8b23-6a90f84e7859 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.637927] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8665996-2b93-4ac2-ad72-fd08d534bc5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.655234] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1434.661612] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Creating folder: Project (cd98ccb13a0a47efb0d3f071ba6710d4). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1434.661970] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f49ee73-fb94-4051-9448-9fad037ac586 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.674327] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Created folder: Project (cd98ccb13a0a47efb0d3f071ba6710d4) in parent group-v369553. [ 1434.674327] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Creating folder: Instances. Parent ref: group-v369676. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1434.674327] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f8c06cf-a713-4efc-8bba-2326681950f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.686914] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Created folder: Instances in parent group-v369676. 
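The nova.virt.hardware lines above trace how the guest CPU topology is derived for the m1.nano flavor: with no flavor or image limits, the maxima default to 65536 per dimension, and for 1 vCPU the only combination whose product matches is 1 socket x 1 core x 1 thread, hence "Got 1 possible topologies" and the sorted result VirtCPUTopology(cores=1,sockets=1,threads=1). The following is a minimal illustrative sketch of that enumeration rule only, assuming just the product-and-maxima constraint visible in the log; it is not Nova's _get_possible_cpu_topologies(), which also honours image properties and preferred orderings.

```python
# Illustrative sketch -- NOT Nova's implementation. It only reproduces the
# rule visible in the log: enumerate (sockets, cores, threads) triples whose
# product equals the vCPU count and that stay within the per-dimension maxima
# (65536 each when the flavor/image declare no limits).
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For the 1-vCPU m1.nano flavor traced above there is exactly one layout,
# matching the "Got 1 possible topologies" line in the log.
print(possible_topologies(1))        # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(len(possible_topologies(4)))   # larger vCPU counts admit several layouts
```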
[ 1434.687109] env[62525]: DEBUG oslo.service.loopingcall [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1434.687322] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1434.687535] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-427d69aa-c64a-4774-970c-b91e55636a63 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.711985] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1434.711985] env[62525]: value = "task-1781150" [ 1434.711985] env[62525]: _type = "Task" [ 1434.711985] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.720335] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781150, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.847817] env[62525]: DEBUG nova.scheduler.client.report [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1434.887465] env[62525]: DEBUG oslo_concurrency.lockutils [None req-966a038b-f4e3-4b75-961f-171b0ae67f53 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.526s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.039790] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781145, 'name': ReconfigVM_Task, 'duration_secs': 0.641015} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.040112] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Reconfigured VM instance instance-00000024 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1435.041260] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1435.041260] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-782da42d-8fde-4e66-b33c-4834ee64fa3a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.048654] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1435.048654] env[62525]: value = "task-1781151" [ 1435.048654] env[62525]: _type = "Task" [ 1435.048654] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.057147] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.110507] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781147, 'name': ReconfigVM_Task, 'duration_secs': 0.291914} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.110811] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d/29aaac3b-1f0e-40fe-9805-a0e6e6ae597d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.111627] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08af6b4d-0f82-4727-b7d2-bfb273045e08 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.119171] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1435.119171] env[62525]: value = "task-1781152" [ 1435.119171] env[62525]: _type = "Task" [ 1435.119171] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.128151] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781152, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.222825] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781150, 'name': CreateVM_Task, 'duration_secs': 0.331883} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.223162] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1435.223844] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.224166] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.224704] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1435.225071] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f971355-abdf-4b43-8681-708a5bd6edd2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.233020] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1435.233020] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52507454-fe0d-eca2-c8f9-e51b3a89c1d0" [ 1435.233020] env[62525]: _type = "Task" [ 1435.233020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.241104] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52507454-fe0d-eca2-c8f9-e51b3a89c1d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.356156] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.791s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.356156] env[62525]: DEBUG nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1435.357467] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.836s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.363405] env[62525]: DEBUG nova.objects.instance [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lazy-loading 'resources' on Instance uuid e3f3fc2c-0060-4521-8aa3-da37209aee81 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1435.392301] env[62525]: DEBUG nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1435.562572] env[62525]: DEBUG oslo_vmware.api [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781151, 'name': PowerOnVM_Task, 'duration_secs': 0.375525} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.562847] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1435.563112] env[62525]: DEBUG nova.compute.manager [None req-9ae1d5d0-93cb-487f-9b55-67e4b7bb274c tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1435.563911] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3284f3-8eee-49e4-8d80-96de64a05965 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.572008] env[62525]: DEBUG nova.compute.manager [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Received event network-changed-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1435.573467] env[62525]: DEBUG nova.compute.manager [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Refreshing instance network info cache due to event network-changed-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1435.573467] env[62525]: DEBUG oslo_concurrency.lockutils [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] Acquiring lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.573467] env[62525]: DEBUG oslo_concurrency.lockutils [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] Acquired lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.573467] env[62525]: DEBUG nova.network.neutron [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Refreshing network info cache for port c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1435.634017] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781152, 'name': Rename_Task, 'duration_secs': 0.15941} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.634017] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1435.634017] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35d437fe-227f-4afd-a7d2-fab412d5d47d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.637952] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1435.637952] env[62525]: value = "task-1781153" [ 1435.637952] env[62525]: _type = "Task" [ 1435.637952] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.647018] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781153, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.741785] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52507454-fe0d-eca2-c8f9-e51b3a89c1d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010396} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.742104] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.742331] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1435.742555] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.742693] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.742903] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1435.743302] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1782c49f-2f02-4ef0-8008-dadfc642e6c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.753211] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1435.753532] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1435.754998] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-700abef7-f8b5-4f31-80af-fd6fd82442af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.761673] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1435.761673] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52455764-34cb-b5d2-523d-29cf2d320e41" [ 1435.761673] env[62525]: _type = "Task" [ 1435.761673] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.773203] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52455764-34cb-b5d2-523d-29cf2d320e41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.868606] env[62525]: DEBUG nova.compute.utils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1435.870622] env[62525]: DEBUG nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1435.870936] env[62525]: DEBUG nova.network.neutron [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1435.917113] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.989572] env[62525]: DEBUG nova.policy [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c0deb1ab43142f29a15397a2e23d048', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '105f108590e14c649fff545b5b96f4fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1436.153335] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781153, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.276877] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52455764-34cb-b5d2-523d-29cf2d320e41, 'name': SearchDatastore_Task, 'duration_secs': 0.009295} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.284240] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25d46e3f-6e5c-453d-8bda-6249260e7630 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.291135] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1436.291135] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5277d2c3-f52c-7975-b234-5b8189c056d0" [ 1436.291135] env[62525]: _type = "Task" [ 1436.291135] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.302953] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5277d2c3-f52c-7975-b234-5b8189c056d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.373912] env[62525]: DEBUG nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1436.382177] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4ba91c-6518-41e5-8d7f-313221284813 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.389665] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b2667d-c348-449f-b172-bcd064cfe315 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.393976] env[62525]: DEBUG nova.network.neutron [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updated VIF entry in instance network info cache for port c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1436.394912] env[62525]: DEBUG nova.network.neutron [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updating instance_info_cache with network_info: [{"id": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "address": "fa:16:3e:d9:21:74", "network": {"id": "26390d2b-b8a0-4c73-ba85-09aba5f224a6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1630990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f146b0ac3df644fe9b0b9b9bef48c34d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9bb49cb-09", "ovs_interfaceid": "c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.430966] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee95905-eac3-4ae9-a531-064b29a3f0f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.440217] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135a2c49-914c-49cd-8b07-70fcde30d305 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1436.460125] env[62525]: DEBUG nova.compute.provider_tree [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.472132] env[62525]: DEBUG nova.network.neutron [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Successfully created port: c89b9d12-f94d-4161-bae7-150d736f9e86 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1436.649763] env[62525]: DEBUG oslo_vmware.api [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781153, 'name': PowerOnVM_Task, 'duration_secs': 0.588411} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.650179] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1436.650269] env[62525]: INFO nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Took 8.85 seconds to spawn the instance on the hypervisor. [ 1436.650435] env[62525]: DEBUG nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1436.651234] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e9d1ec-20de-4a15-8772-b09d094976ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.807891] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5277d2c3-f52c-7975-b234-5b8189c056d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010354} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.807891] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.807891] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1436.807891] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f80453c3-af86-4799-807b-de7ed65366ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.815371] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1436.815371] env[62525]: value = "task-1781154" [ 1436.815371] env[62525]: _type = "Task" [ 1436.815371] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.825967] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781154, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.900089] env[62525]: DEBUG oslo_concurrency.lockutils [req-1d6ae2b9-6aaa-4c81-abf8-da610544d69a req-6eb86a61-70b9-4c42-843a-0f103e469907 service nova] Releasing lock "refresh_cache-e8864d73-35e6-490b-a07c-e8cac8baf880" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.963360] env[62525]: DEBUG nova.scheduler.client.report [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1437.014351] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "3455a540-7fbc-46ba-b7d6-84a345c0463e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.016615] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.016615] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "3455a540-7fbc-46ba-b7d6-84a345c0463e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.016615] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.016615] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.019249] env[62525]: INFO nova.compute.manager [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Terminating instance [ 1437.022712] env[62525]: DEBUG nova.compute.manager [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1437.022712] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1437.024089] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1977ee-4750-4ffc-9955-7bd6f3dc6cec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.034865] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1437.036641] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e99751ef-defb-4255-b00c-0b817a891290 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.049030] env[62525]: DEBUG oslo_vmware.api [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1437.049030] env[62525]: value = "task-1781155" [ 1437.049030] env[62525]: _type = "Task" [ 1437.049030] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.058828] env[62525]: DEBUG oslo_vmware.api [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781155, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.170803] env[62525]: INFO nova.compute.manager [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Took 52.59 seconds to build instance. [ 1437.327102] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781154, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.384318] env[62525]: DEBUG nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1437.418789] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1437.419112] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1437.419209] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1437.419391] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1437.419645] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1437.419812] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1437.420594] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1437.421447] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 
tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1437.421486] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1437.422374] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1437.422615] env[62525]: DEBUG nova.virt.hardware [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1437.423507] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4da77e-c960-42d4-879e-35ab9be88cb5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.437925] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73279c20-05ec-4bb6-8f72-bd6d3bbbe600 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.475291] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.475291] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.379s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.475611] env[62525]: DEBUG nova.objects.instance [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lazy-loading 'resources' on Instance uuid f2240974-0fa4-4f59-ae0c-b9da52f9600e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1437.504730] env[62525]: INFO nova.scheduler.client.report [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Deleted allocations for instance e3f3fc2c-0060-4521-8aa3-da37209aee81 [ 1437.563366] env[62525]: DEBUG oslo_vmware.api [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781155, 'name': 
PowerOffVM_Task, 'duration_secs': 0.306006} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.563366] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1437.563366] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1437.563366] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d13a2bfe-4ccf-41c2-8532-6c277fbffb62 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.642026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1437.644027] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1437.644027] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Deleting the datastore file [datastore1] 3455a540-7fbc-46ba-b7d6-84a345c0463e {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1437.644027] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fa435a6-685b-401f-987d-96e8e572aa89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.655015] env[62525]: DEBUG oslo_vmware.api [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for the task: (returnval){ [ 1437.655015] env[62525]: value = "task-1781157" [ 1437.655015] env[62525]: _type = "Task" [ 1437.655015] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.661959] env[62525]: DEBUG oslo_vmware.api [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781157, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.674853] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b479fbbe-d17a-4533-8db8-b2fd7d41a2fe tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.859s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.829455] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781154, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539382} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.830245] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1437.830671] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1437.833443] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32a549c7-ec2f-4627-8388-a0bfc5fdbef6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.841356] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1437.841356] env[62525]: value = "task-1781158" [ 1437.841356] env[62525]: _type = "Task" [ 1437.841356] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.851259] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781158, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.015194] env[62525]: DEBUG oslo_concurrency.lockutils [None req-333d7425-be9a-4839-a892-15f22cde1882 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066 tempest-FloatingIPsAssociationNegativeTestJSON-2006257066-project-member] Lock "e3f3fc2c-0060-4521-8aa3-da37209aee81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.066s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.051379] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "94560d78-071c-419d-ad10-f42a5b2271a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.051607] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.094247] env[62525]: DEBUG nova.compute.manager [req-43fa85e6-da62-4e8f-a5b9-24df97b527fa req-06004bdd-afcb-46d2-a7b9-afef1c46e028 service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Received event network-vif-plugged-c89b9d12-f94d-4161-bae7-150d736f9e86 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1438.094401] env[62525]: DEBUG oslo_concurrency.lockutils [req-43fa85e6-da62-4e8f-a5b9-24df97b527fa req-06004bdd-afcb-46d2-a7b9-afef1c46e028 service nova] Acquiring lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.094614] env[62525]: DEBUG oslo_concurrency.lockutils [req-43fa85e6-da62-4e8f-a5b9-24df97b527fa req-06004bdd-afcb-46d2-a7b9-afef1c46e028 service nova] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.094771] env[62525]: DEBUG oslo_concurrency.lockutils [req-43fa85e6-da62-4e8f-a5b9-24df97b527fa req-06004bdd-afcb-46d2-a7b9-afef1c46e028 service nova] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.094926] env[62525]: DEBUG nova.compute.manager [req-43fa85e6-da62-4e8f-a5b9-24df97b527fa req-06004bdd-afcb-46d2-a7b9-afef1c46e028 service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] No waiting events found dispatching network-vif-plugged-c89b9d12-f94d-4161-bae7-150d736f9e86 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1438.095220] env[62525]: WARNING 
nova.compute.manager [req-43fa85e6-da62-4e8f-a5b9-24df97b527fa req-06004bdd-afcb-46d2-a7b9-afef1c46e028 service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Received unexpected event network-vif-plugged-c89b9d12-f94d-4161-bae7-150d736f9e86 for instance with vm_state building and task_state spawning. [ 1438.172348] env[62525]: DEBUG oslo_vmware.api [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Task: {'id': task-1781157, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306267} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.172766] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1438.172992] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1438.173064] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1438.173233] env[62525]: INFO nova.compute.manager [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1438.173536] env[62525]: DEBUG oslo.service.loopingcall [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1438.173892] env[62525]: DEBUG nova.compute.manager [-] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1438.173892] env[62525]: DEBUG nova.network.neutron [-] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1438.175750] env[62525]: DEBUG nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1438.217436] env[62525]: DEBUG nova.network.neutron [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Successfully updated port: c89b9d12-f94d-4161-bae7-150d736f9e86 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1438.353856] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.495007} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.354230] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1438.355223] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c60d80-d9fc-4b88-bba1-22455608bcdf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.383256] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.385942] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a7d21a5-be6e-4435-9c54-1bf5e6f0693a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.410527] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1438.410527] env[62525]: value = "task-1781159" [ 1438.410527] env[62525]: _type = "Task" [ 1438.410527] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.422050] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781159, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.493294] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb4b1a0-cfb1-4083-9064-ba85ba78bb69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.501278] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a9711e-9d26-482e-83e6-5189a869717f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.542659] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5772cbe-8100-4310-9097-b0bb8d0ec631 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.550536] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2064fe11-c4cd-4300-b1b5-aff19a611220 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.570370] env[62525]: DEBUG nova.compute.provider_tree [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.701557] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.718433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "refresh_cache-f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.718433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "refresh_cache-f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.718433] env[62525]: DEBUG nova.network.neutron [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1438.921496] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781159, 'name': ReconfigVM_Task, 'duration_secs': 0.287878} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.921790] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1438.922461] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df4fa9cc-3945-44f8-acbd-32c6d7721639 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.930620] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1438.930620] env[62525]: value = "task-1781160" [ 1438.930620] env[62525]: _type = "Task" [ 1438.930620] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.940428] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781160, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.976693] env[62525]: DEBUG nova.compute.manager [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Received event network-changed-151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1438.976693] env[62525]: DEBUG nova.compute.manager [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Refreshing instance network info cache due to event network-changed-151d8aa1-065a-409f-9d41-61d553ade236. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1438.976693] env[62525]: DEBUG oslo_concurrency.lockutils [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] Acquiring lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.976693] env[62525]: DEBUG oslo_concurrency.lockutils [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] Acquired lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.976693] env[62525]: DEBUG nova.network.neutron [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Refreshing network info cache for port 151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.074688] env[62525]: DEBUG nova.scheduler.client.report [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1439.253704] env[62525]: DEBUG nova.network.neutron [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1439.441121] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781160, 'name': Rename_Task, 'duration_secs': 0.183544} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.441634] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.441924] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b778a866-b616-45f8-baf2-a664bf301c5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.448248] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1439.448248] env[62525]: value = "task-1781161" [ 1439.448248] env[62525]: _type = "Task" [ 1439.448248] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.458459] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781161, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.583491] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.594653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.873s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.597865] env[62525]: INFO nova.compute.claims [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1439.610346] env[62525]: DEBUG nova.network.neutron [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Updating instance_info_cache with network_info: [{"id": "c89b9d12-f94d-4161-bae7-150d736f9e86", "address": "fa:16:3e:54:8b:42", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc89b9d12-f9", "ovs_interfaceid": "c89b9d12-f94d-4161-bae7-150d736f9e86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.626648] env[62525]: INFO nova.scheduler.client.report [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Deleted allocations for instance f2240974-0fa4-4f59-ae0c-b9da52f9600e [ 1439.882266] env[62525]: DEBUG nova.network.neutron [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Updated VIF entry in instance network info cache for port 151d8aa1-065a-409f-9d41-61d553ade236. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1439.883049] env[62525]: DEBUG nova.network.neutron [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Updating instance_info_cache with network_info: [{"id": "151d8aa1-065a-409f-9d41-61d553ade236", "address": "fa:16:3e:e1:36:e6", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap151d8aa1-06", "ovs_interfaceid": "151d8aa1-065a-409f-9d41-61d553ade236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.967455] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781161, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.102958] env[62525]: DEBUG nova.network.neutron [-] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.113751] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "refresh_cache-f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.114076] env[62525]: DEBUG nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Instance network_info: |[{"id": "c89b9d12-f94d-4161-bae7-150d736f9e86", "address": "fa:16:3e:54:8b:42", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc89b9d12-f9", "ovs_interfaceid": "c89b9d12-f94d-4161-bae7-150d736f9e86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1440.115046] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:8b:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c89b9d12-f94d-4161-bae7-150d736f9e86', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1440.127165] env[62525]: DEBUG oslo.service.loopingcall [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1440.127337] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1440.127588] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-099e8684-6167-4dc3-862e-d0085cd65f4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.148952] env[62525]: DEBUG nova.compute.manager [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Received event network-changed-c89b9d12-f94d-4161-bae7-150d736f9e86 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1440.149032] env[62525]: DEBUG nova.compute.manager [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Refreshing instance network info cache due to event network-changed-c89b9d12-f94d-4161-bae7-150d736f9e86. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1440.149400] env[62525]: DEBUG oslo_concurrency.lockutils [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] Acquiring lock "refresh_cache-f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.149400] env[62525]: DEBUG oslo_concurrency.lockutils [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] Acquired lock "refresh_cache-f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.149530] env[62525]: DEBUG nova.network.neutron [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Refreshing network info cache for port c89b9d12-f94d-4161-bae7-150d736f9e86 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1440.150749] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ca9ef263-1098-4a88-8ff1-1b5c9c946467 tempest-InstanceActionsV221TestJSON-780312303 tempest-InstanceActionsV221TestJSON-780312303-project-member] Lock "f2240974-0fa4-4f59-ae0c-b9da52f9600e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.533s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.159017] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1440.159017] env[62525]: value = "task-1781162" [ 1440.159017] env[62525]: _type = "Task" [ 1440.159017] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.168227] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781162, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.386432] env[62525]: DEBUG oslo_concurrency.lockutils [req-dea0a501-cd01-49a8-ad36-08fcd1359fb8 req-b96b46b7-df21-4251-9826-f412616d9fcf service nova] Releasing lock "refresh_cache-29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.461419] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781161, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.528139] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.529965] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.606260] env[62525]: INFO nova.compute.manager [-] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Took 2.43 seconds to deallocate network for instance. [ 1440.670375] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781162, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.943623] env[62525]: DEBUG nova.network.neutron [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Updated VIF entry in instance network info cache for port c89b9d12-f94d-4161-bae7-150d736f9e86. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1440.945437] env[62525]: DEBUG nova.network.neutron [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Updating instance_info_cache with network_info: [{"id": "c89b9d12-f94d-4161-bae7-150d736f9e86", "address": "fa:16:3e:54:8b:42", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc89b9d12-f9", "ovs_interfaceid": "c89b9d12-f94d-4161-bae7-150d736f9e86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.963932] env[62525]: DEBUG oslo_vmware.api [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781161, 'name': PowerOnVM_Task, 'duration_secs': 1.141425} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.964385] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.964655] env[62525]: INFO nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Took 6.37 seconds to spawn the instance on the hypervisor. 
[ 1440.965032] env[62525]: DEBUG nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1440.966091] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63befcd1-6027-4870-a11f-e024b0eb7a31 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.056181] env[62525]: DEBUG nova.compute.manager [req-43a32587-8f92-4c65-81f2-bde4ddcce33d req-a1bf910c-5b74-483c-b681-acfbb6a51296 service nova] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Received event network-vif-deleted-91221eae-8243-44e9-a87d-e67faa8613b5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.111805] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a560de-820f-486b-b983-9c8e663bceea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.117920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.125797] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe101b1-5b9d-4ecc-bd1d-7d61affed1ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.168142] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8b0656-88f4-4676-923f-6a0024c13da3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.180902] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcbea58-fa52-4d24-8d87-d8240c5dc2c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.185515] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781162, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.198196] env[62525]: DEBUG nova.compute.provider_tree [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.453053] env[62525]: DEBUG oslo_concurrency.lockutils [req-60f31c65-7b38-4f47-ab29-2296f1846f7e req-bc508954-aea4-4c0e-ab56-61a40a4c1b9e service nova] Releasing lock "refresh_cache-f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.488021] env[62525]: INFO nova.compute.manager [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Took 49.65 seconds to build instance. [ 1441.674892] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781162, 'name': CreateVM_Task, 'duration_secs': 1.426037} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.675202] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1441.675928] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.676173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.676553] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1441.676822] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6107d5a3-fcb1-48e2-8c33-783a1dcdc5e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.681702] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1441.681702] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52888f60-925f-1cea-2072-3a05a177f60b" [ 1441.681702] env[62525]: _type = "Task" [ 1441.681702] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.691425] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52888f60-925f-1cea-2072-3a05a177f60b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.701573] env[62525]: DEBUG nova.scheduler.client.report [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1441.991397] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86510d13-cf01-47fb-a220-92ab3f047685 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.557s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.193952] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52888f60-925f-1cea-2072-3a05a177f60b, 'name': SearchDatastore_Task, 'duration_secs': 0.014989} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.193952] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.194234] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1442.194286] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.194416] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.194595] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1442.195085] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-484b9053-85e8-4c88-b5b3-bed50671a7b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.206511] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1442.206777] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1442.208025] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.208485] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1442.215906] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1449af02-53b8-4a83-b000-2b4a9369f4d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.220254] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.943s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.221891] env[62525]: INFO nova.compute.claims [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.231117] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1442.231117] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d5aaf0-12f4-d5d8-d570-e9f2f2f0150a" [ 1442.231117] env[62525]: _type = "Task" [ 1442.231117] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.238976] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d5aaf0-12f4-d5d8-d570-e9f2f2f0150a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.494485] env[62525]: DEBUG nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1442.535032] env[62525]: INFO nova.compute.manager [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Rebuilding instance [ 1442.587922] env[62525]: DEBUG nova.compute.manager [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1442.588835] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c878158a-4d0d-4792-a814-665034055503 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.717667] env[62525]: DEBUG nova.compute.utils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1442.719547] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1442.720382] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1442.741775] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d5aaf0-12f4-d5d8-d570-e9f2f2f0150a, 'name': SearchDatastore_Task, 'duration_secs': 0.022684} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.742751] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-330cf9fc-33ec-42b4-89d4-afcc4d05257b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.748295] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1442.748295] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f2a9f2-eff7-c331-0c07-93038f34f3e9" [ 1442.748295] env[62525]: _type = "Task" [ 1442.748295] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.757192] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f2a9f2-eff7-c331-0c07-93038f34f3e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.765664] env[62525]: DEBUG nova.policy [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62d1b3275d194480ab34f8d437934dcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f95b8120cae4ff68fff82bf8e933c24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1443.022043] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.104746] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1443.104945] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0eca64b0-c0cf-42d3-836e-7e6237ca771f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.114230] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1443.114230] env[62525]: value = "task-1781163" [ 1443.114230] env[62525]: _type = "Task" [ 1443.114230] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.123914] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781163, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.130903] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Successfully created port: e38028c4-293a-4544-8825-58a9c035c2f0 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1443.224520] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1443.262915] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f2a9f2-eff7-c331-0c07-93038f34f3e9, 'name': SearchDatastore_Task, 'duration_secs': 0.012238} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.266176] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.266564] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b/f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1443.267102] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-733c0bcf-d48f-44d2-bdb6-6aa5551c5284 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.276511] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1443.276511] env[62525]: value = "task-1781164" [ 1443.276511] env[62525]: _type = "Task" [ 1443.276511] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.290483] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.627476] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781163, 'name': PowerOffVM_Task, 'duration_secs': 0.211315} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.632743] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1443.633426] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1443.636412] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c5853b-e47c-4287-92ea-af769164cd87 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.646419] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1443.649409] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d44a6a55-968e-4c85-bf0a-99410bebe6ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.682291] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1443.682627] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1443.682935] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Deleting the datastore file [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1443.683518] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd71a081-6b15-4298-bd9e-1d706d45631f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.695217] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 
tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1443.695217] env[62525]: value = "task-1781166" [ 1443.695217] env[62525]: _type = "Task" [ 1443.695217] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.712084] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781166, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.725070] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558cba32-3f13-4b9a-9c40-9f177dc69048 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.739821] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a92f6d-31e3-4da7-9195-888c83b3f7fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.785521] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e508e5-1be8-487c-9261-0e2ca4b463b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.794649] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781164, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.798399] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14be0591-e146-4e21-8fcb-63515c4461a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.813727] env[62525]: DEBUG nova.compute.provider_tree [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.193840] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.194127] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.194996] env[62525]: DEBUG nova.compute.manager [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1444.195236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc430961-e44c-44b8-a9af-970390c12562 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.208934] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.316968} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.208934] env[62525]: DEBUG nova.compute.manager [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1444.208934] env[62525]: DEBUG nova.objects.instance [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'flavor' on Instance uuid 6e9051e9-aa89-408f-8f62-533085dc1312 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.212940] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1444.212940] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1444.212940] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1444.235472] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1444.263325] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1444.263583] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1444.263729] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.263910] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1444.264069] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1444.264212] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1444.264412] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1444.264569] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1444.264773] env[62525]: DEBUG nova.virt.hardware [None 
req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1444.264908] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1444.265075] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1444.265952] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30584691-8f6e-4960-bc76-459928a9ac55 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.274240] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666c0db7-7f5e-467c-b9a8-5e2d20037f17 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.296160] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671368} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.296377] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b/f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1444.296583] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1444.296830] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b05f7ea-d1c2-4d26-8a0c-c490d6226d01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.303306] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1444.303306] env[62525]: value = "task-1781167" [ 1444.303306] env[62525]: _type = "Task" [ 1444.303306] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.311035] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781167, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.316997] env[62525]: DEBUG nova.scheduler.client.report [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1444.641947] env[62525]: DEBUG nova.compute.manager [req-d57b2a5a-3e0e-4c9a-a0fb-1e26c5a7d8f5 req-7bc20920-9172-4ee7-8de7-fca5939bb290 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Received event network-vif-plugged-e38028c4-293a-4544-8825-58a9c035c2f0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1444.642235] env[62525]: DEBUG oslo_concurrency.lockutils [req-d57b2a5a-3e0e-4c9a-a0fb-1e26c5a7d8f5 req-7bc20920-9172-4ee7-8de7-fca5939bb290 service nova] Acquiring lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.642415] env[62525]: DEBUG oslo_concurrency.lockutils [req-d57b2a5a-3e0e-4c9a-a0fb-1e26c5a7d8f5 req-7bc20920-9172-4ee7-8de7-fca5939bb290 service nova] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.642578] env[62525]: DEBUG oslo_concurrency.lockutils [req-d57b2a5a-3e0e-4c9a-a0fb-1e26c5a7d8f5 req-7bc20920-9172-4ee7-8de7-fca5939bb290 service nova] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.642753] env[62525]: DEBUG nova.compute.manager [req-d57b2a5a-3e0e-4c9a-a0fb-1e26c5a7d8f5 req-7bc20920-9172-4ee7-8de7-fca5939bb290 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] No waiting events found dispatching network-vif-plugged-e38028c4-293a-4544-8825-58a9c035c2f0 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1444.642913] env[62525]: WARNING nova.compute.manager [req-d57b2a5a-3e0e-4c9a-a0fb-1e26c5a7d8f5 req-7bc20920-9172-4ee7-8de7-fca5939bb290 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Received unexpected event network-vif-plugged-e38028c4-293a-4544-8825-58a9c035c2f0 for instance with vm_state building and task_state spawning. 
[ 1444.720933] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1444.721229] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-264e8495-13a5-4dd9-994a-9daa560a28e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.730578] env[62525]: DEBUG oslo_vmware.api [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1444.730578] env[62525]: value = "task-1781168" [ 1444.730578] env[62525]: _type = "Task" [ 1444.730578] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.738999] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Successfully updated port: e38028c4-293a-4544-8825-58a9c035c2f0 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1444.743663] env[62525]: DEBUG oslo_vmware.api [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781168, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.813557] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781167, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070996} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.814058] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1444.815108] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0e8980-1fa2-4999-b1d0-ba7bb0b76a6e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.830969] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.831543] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1444.845751] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b/f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1444.846530] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 40.925s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.848842] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd52084e-a469-47d6-a37c-81733017538b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.873022] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1444.873022] env[62525]: value = "task-1781169" [ 1444.873022] env[62525]: _type = "Task" [ 1444.873022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.880921] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781169, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.243624] env[62525]: DEBUG oslo_vmware.api [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781168, 'name': PowerOffVM_Task, 'duration_secs': 0.259862} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.243918] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1445.244142] env[62525]: DEBUG nova.compute.manager [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1445.244756] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "refresh_cache-1fe967d9-351a-4b44-b7cb-d3c8395d9516" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.244883] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "refresh_cache-1fe967d9-351a-4b44-b7cb-d3c8395d9516" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.245033] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1445.246685] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505d48db-fdf8-487e-bacc-f23146d2db4e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.271124] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1445.271124] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1445.271124] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1445.271124] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1445.271528] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1445.271528] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1445.271528] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1445.271528] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1445.271528] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1445.271787] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1445.271787] env[62525]: DEBUG nova.virt.hardware [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 
tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1445.272640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea8fdfb-ae6d-48bc-a51b-dd926b642018 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.280506] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc05a980-fa0c-47ca-a0ae-5ccbd11f169c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.296897] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.305408] env[62525]: DEBUG oslo.service.loopingcall [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1445.305533] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1445.305864] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bbd1555-55ec-468d-9e3e-3837b22eb615 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.325363] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.325363] env[62525]: value = "task-1781170" [ 1445.325363] env[62525]: _type = "Task" [ 1445.325363] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.334439] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781170, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.348480] env[62525]: DEBUG nova.compute.utils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1445.349959] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1445.350275] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1445.385812] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781169, 'name': ReconfigVM_Task, 'duration_secs': 0.436671} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.386106] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Reconfigured VM instance instance-0000002c to attach disk [datastore1] f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b/f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1445.387435] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09b0b27e-f715-417f-b781-2937b4ab2e20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.394325] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1445.394325] env[62525]: value = "task-1781171" [ 1445.394325] env[62525]: _type = "Task" [ 1445.394325] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.402478] env[62525]: DEBUG nova.policy [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62d1b3275d194480ab34f8d437934dcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f95b8120cae4ff68fff82bf8e933c24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1445.410101] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781171, 'name': Rename_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.688413] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Successfully created port: 3325b087-dcb4-4eae-9fb2-f584a769e45a {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1445.778687] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8b32146f-2da9-4d9f-970f-5c9f9c53a155 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.584s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.805921] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1445.837590] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781170, 'name': CreateVM_Task, 'duration_secs': 0.260923} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.837771] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1445.838537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.838773] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.839145] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1445.841032] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-952dba69-ff83-4f0f-a526-54eac17c5d54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.845752] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1445.845752] env[62525]: value = 
"session[52912505-83d3-c6c8-239b-e663f6298abd]52b3adc1-435a-e22e-19ff-b76faeaf2c20" [ 1445.845752] env[62525]: _type = "Task" [ 1445.845752] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.857262] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1445.860736] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b3adc1-435a-e22e-19ff-b76faeaf2c20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.911521] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781171, 'name': Rename_Task, 'duration_secs': 0.156715} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.917122] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.917307] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c7603ce8-8471-4813-9faf-3667a205893c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.917500] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance deef59c8-f710-434d-bddc-f63bb3d518b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.918437] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 98334a1b-1a73-408f-93a4-6dc72764ebfc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1445.918437] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 56cb0d0c-a7dd-4158-8bed-ddff050e0226 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.918437] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance aa639aa3-d21c-4923-bc39-56e648c566fb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1445.918437] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f93669f2-c59d-4f3f-85a2-a60d714326ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.918643] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1445.918740] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.919545] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 3455a540-7fbc-46ba-b7d6-84a345c0463e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1445.920177] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.920177] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6e9051e9-aa89-408f-8f62-533085dc1312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.920353] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2f589dc1-9244-475f-86d0-4b69b511508b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.920430] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance d2e7c558-02af-477c-b996-239ef14ed75b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.920626] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 1f3792c0-9f86-4d76-a1a6-28d492869046 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.920821] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e8864d73-35e6-490b-a07c-e8cac8baf880 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.921027] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.921219] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.921405] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.921590] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 1fe967d9-351a-4b44-b7cb-d3c8395d9516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.921777] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 10f10329-9a7d-4e1b-8fb4-90350169e518 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1445.923651] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.924866] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0197b969-2a2e-43ee-8100-22a71b1bb3f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.934775] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1445.934775] env[62525]: value = "task-1781172" [ 1445.934775] env[62525]: _type = "Task" [ 1445.934775] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.952043] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781172, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.055688] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Updating instance_info_cache with network_info: [{"id": "e38028c4-293a-4544-8825-58a9c035c2f0", "address": "fa:16:3e:90:ca:76", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38028c4-29", "ovs_interfaceid": "e38028c4-293a-4544-8825-58a9c035c2f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.357830] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b3adc1-435a-e22e-19ff-b76faeaf2c20, 'name': SearchDatastore_Task, 'duration_secs': 0.016687} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.358171] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.359083] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.359083] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.359083] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.359083] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.359237] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6047b8ad-17ee-409c-b2c0-95566b12d58a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.374573] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.374779] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1446.375549] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c169427c-a0bb-46e4-be24-3d9b4e49374d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.381866] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1446.381866] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dc3dfc-40b0-b6ab-8cc3-ac72f91127c9" [ 1446.381866] env[62525]: _type = "Task" [ 1446.381866] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.392776] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dc3dfc-40b0-b6ab-8cc3-ac72f91127c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.430323] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 7c8474fd-2ca5-4ecc-b2e6-4248baafd639 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1446.446078] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781172, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.558739] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "refresh_cache-1fe967d9-351a-4b44-b7cb-d3c8395d9516" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.559180] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Instance network_info: |[{"id": "e38028c4-293a-4544-8825-58a9c035c2f0", "address": "fa:16:3e:90:ca:76", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38028c4-29", "ovs_interfaceid": "e38028c4-293a-4544-8825-58a9c035c2f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1446.559648] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:ca:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e38028c4-293a-4544-8825-58a9c035c2f0', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1446.567255] env[62525]: DEBUG oslo.service.loopingcall [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1446.567495] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1446.567720] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-064635a5-941a-456a-bde7-4ed3cfde6ce0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.588189] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1446.588189] env[62525]: value = "task-1781173" [ 1446.588189] env[62525]: _type = "Task" [ 1446.588189] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.595901] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781173, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.756291] env[62525]: DEBUG nova.compute.manager [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Received event network-changed-e38028c4-293a-4544-8825-58a9c035c2f0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1446.756291] env[62525]: DEBUG nova.compute.manager [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Refreshing instance network info cache due to event network-changed-e38028c4-293a-4544-8825-58a9c035c2f0. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1446.757154] env[62525]: DEBUG oslo_concurrency.lockutils [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] Acquiring lock "refresh_cache-1fe967d9-351a-4b44-b7cb-d3c8395d9516" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.757561] env[62525]: DEBUG oslo_concurrency.lockutils [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] Acquired lock "refresh_cache-1fe967d9-351a-4b44-b7cb-d3c8395d9516" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.757925] env[62525]: DEBUG nova.network.neutron [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Refreshing network info cache for port e38028c4-293a-4544-8825-58a9c035c2f0 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1446.871026] env[62525]: DEBUG nova.objects.instance [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'flavor' on Instance uuid 6e9051e9-aa89-408f-8f62-533085dc1312 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1446.872657] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1446.893571] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dc3dfc-40b0-b6ab-8cc3-ac72f91127c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009297} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.897701] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fab06bbb-acc3-44de-9281-9f56fdc45e5b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.908312] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1446.909033] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1446.909033] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1446.909033] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1446.909293] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1446.909435] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1446.909744] env[62525]: DEBUG nova.virt.hardware [None 
req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1446.910036] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1446.910481] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1446.910481] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1446.910770] env[62525]: DEBUG nova.virt.hardware [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1446.912315] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219fdc4a-f469-483f-96c8-536e0144f134 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.916647] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1446.916647] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f38512-0b0d-2502-219e-7dd6387fc7ce" [ 1446.916647] env[62525]: _type = "Task" [ 1446.916647] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.927624] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3169b93-69fa-4274-8058-db0367f1ed4b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.938160] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 61f05e69-5e90-47da-9f47-3651b580a23c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1446.939619] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f38512-0b0d-2502-219e-7dd6387fc7ce, 'name': SearchDatastore_Task, 'duration_secs': 0.011374} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.943531] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.943668] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1446.955340] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2e4d02f-a933-4955-9f61-812546a0c67c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.968699] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1446.968699] env[62525]: value = "task-1781174" [ 1446.968699] env[62525]: _type = "Task" [ 1446.968699] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.968994] env[62525]: DEBUG oslo_vmware.api [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781172, 'name': PowerOnVM_Task, 'duration_secs': 0.767503} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.969387] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1446.969635] env[62525]: INFO nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Took 9.59 seconds to spawn the instance on the hypervisor. 
[ 1446.969933] env[62525]: DEBUG nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1446.975282] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c7728c-d929-49cd-9d79-06e75e17c57c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.984852] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.021316] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.021555] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.098212] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781173, 'name': CreateVM_Task, 'duration_secs': 0.37832} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.098402] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1447.099103] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.099277] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.099588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1447.099851] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db9c814c-bd97-4466-9d50-93a11d192d41 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.104454] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1447.104454] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c9a4c1-4170-affe-ee4a-cdd6b94841c5" [ 1447.104454] env[62525]: _type = "Task" [ 1447.104454] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.112469] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c9a4c1-4170-affe-ee4a-cdd6b94841c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.380214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.380214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.380388] env[62525]: DEBUG nova.network.neutron [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1447.380802] env[62525]: DEBUG nova.objects.instance [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'info_cache' on Instance uuid 6e9051e9-aa89-408f-8f62-533085dc1312 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1447.447094] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.479912] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781174, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.491603] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Successfully updated port: 3325b087-dcb4-4eae-9fb2-f584a769e45a {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1447.502915] env[62525]: INFO nova.compute.manager [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Took 53.87 seconds to build instance. [ 1447.562868] env[62525]: DEBUG nova.network.neutron [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Updated VIF entry in instance network info cache for port e38028c4-293a-4544-8825-58a9c035c2f0. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1447.563554] env[62525]: DEBUG nova.network.neutron [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Updating instance_info_cache with network_info: [{"id": "e38028c4-293a-4544-8825-58a9c035c2f0", "address": "fa:16:3e:90:ca:76", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38028c4-29", "ovs_interfaceid": "e38028c4-293a-4544-8825-58a9c035c2f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.622266] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c9a4c1-4170-affe-ee4a-cdd6b94841c5, 'name': SearchDatastore_Task, 'duration_secs': 0.011214} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.622797] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.623122] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1447.623422] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.623580] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.623860] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1447.624297] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7f0b935-1c6f-4d78-8fde-ecc80adef2df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.640469] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1447.640709] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1447.641933] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0fa3f7d-0c1a-4610-a855-96921e3921bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.649889] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1447.649889] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52882d36-f2c0-854f-e3d3-d1a2b43d2809" [ 1447.649889] env[62525]: _type = "Task" [ 1447.649889] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.662875] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52882d36-f2c0-854f-e3d3-d1a2b43d2809, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.885296] env[62525]: DEBUG nova.objects.base [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Object Instance<6e9051e9-aa89-408f-8f62-533085dc1312> lazy-loaded attributes: flavor,info_cache {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1447.904296] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a0d4b0-447c-4a60-bcd7-aa1a1fbdee8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.910727] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b33448-0d2a-4bc5-b434-57eaa8c0bbc1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Suspending the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1447.911245] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-886ab60f-491f-4df7-854f-9db65da3d804 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.920204] env[62525]: DEBUG oslo_vmware.api [None req-e2b33448-0d2a-4bc5-b434-57eaa8c0bbc1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1447.920204] env[62525]: value = "task-1781175" [ 1447.920204] env[62525]: _type = "Task" [ 1447.920204] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.933184] env[62525]: DEBUG oslo_vmware.api [None req-e2b33448-0d2a-4bc5-b434-57eaa8c0bbc1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781175, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.950124] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 06716b84-3761-40b0-b76a-0c6ebf0d6aa7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.982263] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.946352} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.982263] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1447.982263] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1447.982263] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8321050-1159-4091-9ad4-065b689113c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.991706] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1447.991706] env[62525]: value = "task-1781176" [ 1447.991706] env[62525]: _type = "Task" [ 1447.991706] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.999924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "refresh_cache-10f10329-9a7d-4e1b-8fb4-90350169e518" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.000173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "refresh_cache-10f10329-9a7d-4e1b-8fb4-90350169e518" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.000397] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1448.002286] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781176, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.005211] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0aea7f43-e020-4c58-b3e2-1bfd95cfd327 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.612s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.066409] env[62525]: DEBUG oslo_concurrency.lockutils [req-1948af1b-a5c6-4981-8078-e1ca3d4b1625 req-b0875d02-bb8d-416a-a969-47a0c09742c2 service nova] Releasing lock "refresh_cache-1fe967d9-351a-4b44-b7cb-d3c8395d9516" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.161228] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52882d36-f2c0-854f-e3d3-d1a2b43d2809, 'name': SearchDatastore_Task, 'duration_secs': 0.062675} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.162114] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a335a379-49ac-4b2a-b654-799773f7754b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.168511] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1448.168511] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5293425b-6fab-2b54-047a-eec43e5d41c5" [ 1448.168511] env[62525]: _type = "Task" [ 1448.168511] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.177140] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5293425b-6fab-2b54-047a-eec43e5d41c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.430412] env[62525]: DEBUG oslo_vmware.api [None req-e2b33448-0d2a-4bc5-b434-57eaa8c0bbc1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781175, 'name': SuspendVM_Task} progress is 58%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.453976] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 7a92bac8-9cee-41ed-81e3-08b48432fe7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1448.499024] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781176, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09877} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.499283] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1448.500268] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92aaade1-6fa5-4536-a620-4f471fd8ac90 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.514824] env[62525]: DEBUG nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1448.527341] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1448.528318] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fb6007d-9d2a-4657-8b55-31f916265e40 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.551618] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1448.551618] env[62525]: value = "task-1781177" [ 1448.551618] env[62525]: _type = "Task" [ 1448.551618] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.562521] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781177, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.571634] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1448.684235] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5293425b-6fab-2b54-047a-eec43e5d41c5, 'name': SearchDatastore_Task, 'duration_secs': 0.017987} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.684235] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.684235] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1fe967d9-351a-4b44-b7cb-d3c8395d9516/1fe967d9-351a-4b44-b7cb-d3c8395d9516.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1448.684235] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b12b0919-cc84-43d1-96cc-d7a954e1148c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.690306] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1448.690306] env[62525]: value = "task-1781178" [ 1448.690306] env[62525]: _type = "Task" [ 1448.690306] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.699667] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781178, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.739955] env[62525]: DEBUG nova.network.neutron [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.778148] env[62525]: DEBUG nova.network.neutron [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Updating instance_info_cache with network_info: [{"id": "3325b087-dcb4-4eae-9fb2-f584a769e45a", "address": "fa:16:3e:55:aa:7a", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3325b087-dc", "ovs_interfaceid": "3325b087-dcb4-4eae-9fb2-f584a769e45a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.839484] env[62525]: DEBUG nova.compute.manager [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Received event 
network-vif-plugged-3325b087-dcb4-4eae-9fb2-f584a769e45a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.839658] env[62525]: DEBUG oslo_concurrency.lockutils [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] Acquiring lock "10f10329-9a7d-4e1b-8fb4-90350169e518-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.840155] env[62525]: DEBUG oslo_concurrency.lockutils [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.840155] env[62525]: DEBUG oslo_concurrency.lockutils [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.840292] env[62525]: DEBUG nova.compute.manager [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] No waiting events found dispatching network-vif-plugged-3325b087-dcb4-4eae-9fb2-f584a769e45a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1448.840374] env[62525]: WARNING nova.compute.manager [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Received unexpected event network-vif-plugged-3325b087-dcb4-4eae-9fb2-f584a769e45a for instance with vm_state building and task_state spawning. [ 1448.840600] env[62525]: DEBUG nova.compute.manager [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Received event network-changed-3325b087-dcb4-4eae-9fb2-f584a769e45a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.840818] env[62525]: DEBUG nova.compute.manager [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Refreshing instance network info cache due to event network-changed-3325b087-dcb4-4eae-9fb2-f584a769e45a. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1448.841055] env[62525]: DEBUG oslo_concurrency.lockutils [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] Acquiring lock "refresh_cache-10f10329-9a7d-4e1b-8fb4-90350169e518" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.932760] env[62525]: DEBUG oslo_vmware.api [None req-e2b33448-0d2a-4bc5-b434-57eaa8c0bbc1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781175, 'name': SuspendVM_Task, 'duration_secs': 0.649961} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.933191] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b33448-0d2a-4bc5-b434-57eaa8c0bbc1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Suspended the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1448.933276] env[62525]: DEBUG nova.compute.manager [None req-e2b33448-0d2a-4bc5-b434-57eaa8c0bbc1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1448.934460] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18494a1-c202-4efe-8dbc-fafd07735b07 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.957117] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance d8c7d102-46e6-40fe-a864-a72590af4982 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1449.063760] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781177, 'name': ReconfigVM_Task, 'duration_secs': 0.305088} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.063760] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5/85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1449.063760] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ff86f92-2a04-46b7-92d7-9b2258edcaa1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.067630] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.071947] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1449.071947] env[62525]: value = "task-1781179" [ 1449.071947] env[62525]: _type = "Task" [ 1449.071947] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.085475] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781179, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.203862] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781178, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.244184] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.281016] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "refresh_cache-10f10329-9a7d-4e1b-8fb4-90350169e518" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.281405] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Instance network_info: |[{"id": "3325b087-dcb4-4eae-9fb2-f584a769e45a", "address": "fa:16:3e:55:aa:7a", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3325b087-dc", "ovs_interfaceid": "3325b087-dcb4-4eae-9fb2-f584a769e45a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1449.281706] env[62525]: DEBUG oslo_concurrency.lockutils [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] Acquired lock "refresh_cache-10f10329-9a7d-4e1b-8fb4-90350169e518" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.281914] env[62525]: DEBUG nova.network.neutron [req-109510c2-65cb-42a3-8928-bb301aa684a7 
req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Refreshing network info cache for port 3325b087-dcb4-4eae-9fb2-f584a769e45a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1449.283206] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:aa:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3325b087-dcb4-4eae-9fb2-f584a769e45a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1449.291729] env[62525]: DEBUG oslo.service.loopingcall [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1449.295023] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1449.295558] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e886b247-ed77-4972-8e01-28a424f8f073 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.316512] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1449.316512] env[62525]: value = "task-1781180" [ 1449.316512] env[62525]: _type = "Task" [ 1449.316512] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.324355] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781180, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.460517] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 70313696-a9cc-499c-b9e6-329a71c4b915 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1449.585605] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781179, 'name': Rename_Task, 'duration_secs': 0.451171} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.586695] env[62525]: DEBUG nova.network.neutron [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Updated VIF entry in instance network info cache for port 3325b087-dcb4-4eae-9fb2-f584a769e45a. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1449.587166] env[62525]: DEBUG nova.network.neutron [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Updating instance_info_cache with network_info: [{"id": "3325b087-dcb4-4eae-9fb2-f584a769e45a", "address": "fa:16:3e:55:aa:7a", "network": {"id": "3a7138ad-f305-45b5-8c9a-79f27e75698d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-350116646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f95b8120cae4ff68fff82bf8e933c24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3325b087-dc", "ovs_interfaceid": "3325b087-dcb4-4eae-9fb2-f584a769e45a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.588476] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1449.589116] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a687812-2f87-4fea-ace9-61cbe4a588d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.597411] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1449.597411] env[62525]: value = "task-1781181" [ 1449.597411] env[62525]: _type = "Task" [ 1449.597411] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.610533] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781181, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.700470] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629513} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.700709] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1fe967d9-351a-4b44-b7cb-d3c8395d9516/1fe967d9-351a-4b44-b7cb-d3c8395d9516.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1449.700924] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1449.701203] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e4f4a10-131b-4a28-90ea-8e30e7b3f69b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.707693] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1449.707693] env[62525]: value = "task-1781182" [ 1449.707693] env[62525]: _type = "Task" [ 1449.707693] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.715790] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781182, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.747475] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1449.747794] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68ea9405-07fd-4bfd-93b7-3a41a604f0af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.755095] env[62525]: DEBUG oslo_vmware.api [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1449.755095] env[62525]: value = "task-1781183" [ 1449.755095] env[62525]: _type = "Task" [ 1449.755095] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.763968] env[62525]: DEBUG oslo_vmware.api [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781183, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.826744] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781180, 'name': CreateVM_Task, 'duration_secs': 0.39525} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.826918] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1449.827857] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.828151] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.828622] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1449.828951] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03c7d668-fc97-416c-b15a-6c900593e889 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.833845] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1449.833845] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52181cfe-6a4a-63ba-9166-94eb801d7dad" [ 1449.833845] env[62525]: _type = "Task" [ 1449.833845] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.842798] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52181cfe-6a4a-63ba-9166-94eb801d7dad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.963660] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 94560d78-071c-419d-ad10-f42a5b2271a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1450.089914] env[62525]: DEBUG oslo_concurrency.lockutils [req-109510c2-65cb-42a3-8928-bb301aa684a7 req-e1563358-4f8e-47b4-aef8-1118944ffb59 service nova] Releasing lock "refresh_cache-10f10329-9a7d-4e1b-8fb4-90350169e518" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.109546] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781181, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.218519] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781182, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14418} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.218816] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1450.219644] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727af5e6-896f-4220-b1cf-e9598d740390 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.243978] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 1fe967d9-351a-4b44-b7cb-d3c8395d9516/1fe967d9-351a-4b44-b7cb-d3c8395d9516.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1450.244733] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b89bedaa-f813-48bd-a3a7-6aa96aa80115 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.267105] env[62525]: DEBUG oslo_vmware.api [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781183, 'name': PowerOnVM_Task, 'duration_secs': 0.464659} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.268363] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1450.268554] env[62525]: DEBUG nova.compute.manager [None req-82cbae80-eba6-4290-b9d3-29910cbc52b9 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1450.268887] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1450.268887] env[62525]: value = "task-1781184" [ 1450.268887] env[62525]: _type = "Task" [ 1450.268887] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.269635] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7162a6e5-b57a-4f2b-a7d8-eaeb439c5406 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.281094] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781184, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.345062] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52181cfe-6a4a-63ba-9166-94eb801d7dad, 'name': SearchDatastore_Task, 'duration_secs': 0.012213} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.345386] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.345628] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1450.345866] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.346029] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.346218] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1450.346751] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ee3e8fb-6f9a-4466-895f-efdf27d4c3de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.365388] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1450.365571] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1450.366350] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53694a98-9064-48f7-b23d-72a0fa37ce79 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.373255] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1450.373255] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5212c055-756d-6c5d-65c4-4f355942d5e1" [ 1450.373255] env[62525]: _type = "Task" [ 1450.373255] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.381924] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5212c055-756d-6c5d-65c4-4f355942d5e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.470424] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0067de08-6708-4c7c-a83a-ed9df193d5cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1450.470794] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1450.470977] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1450.607860] env[62525]: DEBUG oslo_vmware.api [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781181, 'name': PowerOnVM_Task, 'duration_secs': 0.823867} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.608205] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1450.608417] env[62525]: DEBUG nova.compute.manager [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1450.609226] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d72851-bd5a-4726-9941-5c6853864857 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.783432] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781184, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.855259] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c74f1e-ce45-4668-9781-d4d2f244e5ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.863776] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bfe7fc-3469-42b8-9480-db8238e5dc93 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.899187] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea57f83f-4509-4bd0-bd59-03539874f60c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.910145] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec6431b-3c59-4eef-b7d8-6cac0b3e0c6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.913941] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5212c055-756d-6c5d-65c4-4f355942d5e1, 'name': SearchDatastore_Task, 'duration_secs': 0.014234} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.915040] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23f74e6e-7fc3-41e9-b461-2735a3b9ef23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.924840] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1450.929754] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1450.929754] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5290526a-59c2-8113-d9ac-3b8af6b3fa6d" [ 1450.929754] env[62525]: _type = "Task" [ 1450.929754] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.938282] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5290526a-59c2-8113-d9ac-3b8af6b3fa6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.126852] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.213030] env[62525]: DEBUG nova.compute.manager [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1451.215289] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e25c1d1-dae4-469b-afe5-ce87b71b9724 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.282070] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781184, 'name': ReconfigVM_Task, 'duration_secs': 0.868596} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.282368] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 1fe967d9-351a-4b44-b7cb-d3c8395d9516/1fe967d9-351a-4b44-b7cb-d3c8395d9516.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1451.283360] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fda002d-39f8-439d-981d-4b73c57dc5e3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.289704] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1451.289704] env[62525]: value = "task-1781185" [ 1451.289704] env[62525]: _type = "Task" [ 1451.289704] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.297732] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781185, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.442820] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5290526a-59c2-8113-d9ac-3b8af6b3fa6d, 'name': SearchDatastore_Task, 'duration_secs': 0.009519} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.443108] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.443453] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 10f10329-9a7d-4e1b-8fb4-90350169e518/10f10329-9a7d-4e1b-8fb4-90350169e518.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1451.443633] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b724cc8-0155-480f-a232-e6c2e294bc5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.447376] env[62525]: ERROR nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [req-eecd6f6c-30d6-414e-ac3b-0f40a1d0a6dd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eecd6f6c-30d6-414e-ac3b-0f40a1d0a6dd"}]} [ 1451.451505] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1451.451505] env[62525]: value = "task-1781186" [ 1451.451505] env[62525]: _type = "Task" [ 1451.451505] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.459350] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781186, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.464908] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1451.467830] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.467830] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.467830] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.467830] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.468078] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.470321] env[62525]: INFO nova.compute.manager [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Terminating instance [ 1451.474852] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "refresh_cache-85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.474852] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquired lock "refresh_cache-85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.474852] env[62525]: DEBUG nova.network.neutron [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1451.489015] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1451.489015] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1451.501678] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1451.523601] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1451.726379] env[62525]: INFO nova.compute.manager [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] instance snapshotting [ 1451.726379] env[62525]: WARNING nova.compute.manager [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1451.729312] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c33b7aa-1a00-4e19-89b8-e36dd2bf2983 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.759111] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236cb422-cb69-4812-b9b4-6526cabe185d 
{{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.802803] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781185, 'name': Rename_Task, 'duration_secs': 0.149759} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.803126] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1451.803388] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42141128-e1e5-40bd-8112-7ce177fd05b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.810971] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1451.810971] env[62525]: value = "task-1781187" [ 1451.810971] env[62525]: _type = "Task" [ 1451.810971] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.819625] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781187, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.965678] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510921} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.965929] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 10f10329-9a7d-4e1b-8fb4-90350169e518/10f10329-9a7d-4e1b-8fb4-90350169e518.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1451.966162] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1451.966425] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96d91d73-83ab-4fb7-b96f-f1ed1b0f794d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.973692] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1451.973692] env[62525]: value = "task-1781188" [ 1451.973692] env[62525]: _type = "Task" [ 1451.973692] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.983611] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.999280] env[62525]: DEBUG nova.network.neutron [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1452.043403] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b24537-1252-4b8f-ae37-95a7257db278 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.052491] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29445633-b7cc-47ab-8266-2deb36738a6d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.058788] env[62525]: DEBUG nova.network.neutron [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.086652] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facf3db5-191d-45d3-9087-1332b00f2114 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.095855] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b580425f-601d-4edb-8d7b-566421b24d2a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.112592] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1452.229299] env[62525]: DEBUG nova.objects.instance [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lazy-loading 'flavor' on Instance uuid d2e7c558-02af-477c-b996-239ef14ed75b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1452.273756] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1452.274175] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b01aa811-348e-4e33-b369-be58f88d73fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.281691] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1452.281691] env[62525]: value = "task-1781189" [ 1452.281691] env[62525]: _type = "Task" [ 1452.281691] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.290391] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781189, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.320007] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781187, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.486368] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082549} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.486620] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1452.487633] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d036360-f5ee-4219-9d5c-7badc4deb672 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.511729] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 10f10329-9a7d-4e1b-8fb4-90350169e518/10f10329-9a7d-4e1b-8fb4-90350169e518.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1452.512479] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9236f163-b026-4f4c-bde1-29d676269d34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.535140] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1452.535140] env[62525]: value = "task-1781190" [ 1452.535140] env[62525]: _type = "Task" [ 1452.535140] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.544192] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781190, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.586057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Releasing lock "refresh_cache-85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.586518] env[62525]: DEBUG nova.compute.manager [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1452.586744] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1452.587634] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa41825c-5323-487b-9248-56c089b0a0a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.595642] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1452.595911] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f275476-e834-4e01-af95-92f68e2590cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.603051] env[62525]: DEBUG oslo_vmware.api [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1452.603051] env[62525]: value = "task-1781191" [ 1452.603051] env[62525]: _type = "Task" [ 1452.603051] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.615808] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1452.620476] env[62525]: DEBUG oslo_vmware.api [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781191, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.734394] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.734581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquired lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.794543] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781189, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.824451] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781187, 'name': PowerOnVM_Task} progress is 74%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.834614] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "c7603ce8-8471-4813-9faf-3667a205893c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.837838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "c7603ce8-8471-4813-9faf-3667a205893c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.837838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "c7603ce8-8471-4813-9faf-3667a205893c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.837838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "c7603ce8-8471-4813-9faf-3667a205893c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.837838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 
tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "c7603ce8-8471-4813-9faf-3667a205893c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.838746] env[62525]: INFO nova.compute.manager [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Terminating instance [ 1452.843178] env[62525]: DEBUG nova.compute.manager [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1452.843640] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1452.844670] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f7c1cb-6a7a-429c-a405-4e10e4b7b55a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.854126] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1452.854126] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b53d2850-252e-45b3-b92b-9c0b88e8faf3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.859859] env[62525]: DEBUG oslo_vmware.api [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1452.859859] env[62525]: value = "task-1781192" [ 1452.859859] env[62525]: _type = "Task" [ 1452.859859] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.868376] env[62525]: DEBUG oslo_vmware.api [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1781192, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.051413] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781190, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.113543] env[62525]: DEBUG oslo_vmware.api [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781191, 'name': PowerOffVM_Task, 'duration_secs': 0.227634} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.113997] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1453.115101] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1453.115101] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3748ac65-bf96-42d9-a138-dd912a763700 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.122178] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1453.122178] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.275s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1453.122178] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.012s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.123283] env[62525]: INFO nova.compute.claims [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1453.126106] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.126271] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1453.140924] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 
tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1453.140924] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1453.140924] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Deleting the datastore file [datastore1] 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1453.140924] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-caa123e3-7d9f-4411-8a6f-fb810573cb09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.147563] env[62525]: DEBUG oslo_vmware.api [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for the task: (returnval){ [ 1453.147563] env[62525]: value = "task-1781194" [ 1453.147563] env[62525]: _type = "Task" [ 1453.147563] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.156383] env[62525]: DEBUG oslo_vmware.api [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.295755] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781189, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.320696] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781187, 'name': PowerOnVM_Task, 'duration_secs': 1.319467} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.321035] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1453.321215] env[62525]: INFO nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Took 9.09 seconds to spawn the instance on the hypervisor. 
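The CreateSnapshot_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware calling pattern: a task-returning vSphere method is invoked through the API session, and wait_for_task() (api.py:397) then polls it via _poll_task (api.py:434/444), which produces the "Waiting for the task" and "progress is N% / completed successfully" lines seen here. A minimal sketch of that pattern outside Nova, assuming placeholder vCenter connection details and a vm_ref looked up elsewhere:

from oslo_vmware import api as vmware_api

# Placeholder connection details; in Nova these come from the [vmware]
# section of nova.conf.
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'username', 'password',
    api_retry_count=10, task_poll_interval=0.5)

def power_off_vm(vm_ref):
    """vm_ref: a VirtualMachine ManagedObjectReference obtained elsewhere."""
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # Blocks while polling the task server-side; raises on error, returns
    # the task info on success.
    return session.wait_for_task(task)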
[ 1453.321407] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1453.322226] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12e0f21-c40e-4c10-83a0-7ea43d22077f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.372518] env[62525]: DEBUG oslo_vmware.api [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1781192, 'name': PowerOffVM_Task, 'duration_secs': 0.323001} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.372895] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1453.373134] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1453.373415] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee951af3-f69f-48d8-917b-c9c5e9a6edfd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.443346] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1453.443691] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1453.443921] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Deleting the datastore file [datastore1] c7603ce8-8471-4813-9faf-3667a205893c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1453.444239] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-554e62f7-10d1-4e68-8410-f2b5ca110166 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.450859] env[62525]: DEBUG oslo_vmware.api [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 
tempest-ServersTestManualDisk-1807868986-project-member] Waiting for the task: (returnval){ [ 1453.450859] env[62525]: value = "task-1781196" [ 1453.450859] env[62525]: _type = "Task" [ 1453.450859] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.458466] env[62525]: DEBUG oslo_vmware.api [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1781196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.470614] env[62525]: DEBUG nova.network.neutron [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1453.547493] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781190, 'name': ReconfigVM_Task, 'duration_secs': 0.587138} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.547872] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 10f10329-9a7d-4e1b-8fb4-90350169e518/10f10329-9a7d-4e1b-8fb4-90350169e518.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1453.548564] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8f1debb-64cf-4d38-827d-fdd7c17716ad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.555442] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1453.555442] env[62525]: value = "task-1781197" [ 1453.555442] env[62525]: _type = "Task" [ 1453.555442] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.563877] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781197, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.638609] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] There are 25 instances to clean {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1453.639187] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f2240974-0fa4-4f59-ae0c-b9da52f9600e] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1453.661940] env[62525]: DEBUG oslo_vmware.api [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Task: {'id': task-1781194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150375} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.662227] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1453.662460] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1453.663195] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1453.663195] env[62525]: INFO nova.compute.manager [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1453.663395] env[62525]: DEBUG oslo.service.loopingcall [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.663900] env[62525]: DEBUG nova.compute.manager [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1453.664034] env[62525]: DEBUG nova.network.neutron [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1453.680278] env[62525]: DEBUG nova.network.neutron [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1453.753901] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.754263] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.754677] env[62525]: DEBUG nova.objects.instance [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'flavor' on Instance uuid 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1453.796621] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781189, 'name': CreateSnapshot_Task, 'duration_secs': 1.433665} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.796934] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1453.798031] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c33c615-cb34-402c-beed-cb81310d4fde {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.843594] env[62525]: INFO nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Took 56.15 seconds to build instance. [ 1453.937117] env[62525]: DEBUG nova.compute.manager [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Received event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1453.937117] env[62525]: DEBUG nova.compute.manager [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing instance network info cache due to event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1453.937117] env[62525]: DEBUG oslo_concurrency.lockutils [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] Acquiring lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.961428] env[62525]: DEBUG oslo_vmware.api [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Task: {'id': task-1781196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170461} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.961589] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1453.961719] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1453.961862] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1453.962030] env[62525]: INFO nova.compute.manager [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1453.962287] env[62525]: DEBUG oslo.service.loopingcall [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.962475] env[62525]: DEBUG nova.compute.manager [-] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1453.962566] env[62525]: DEBUG nova.network.neutron [-] [instance: c7603ce8-8471-4813-9faf-3667a205893c] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1454.066378] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781197, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.146332] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: e3f3fc2c-0060-4521-8aa3-da37209aee81] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1454.183219] env[62525]: DEBUG nova.network.neutron [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.315412] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1454.315807] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-05a046c2-882e-45ac-817f-2fb0fae81792 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.331202] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1454.331202] env[62525]: value = "task-1781198" [ 1454.331202] env[62525]: _type = "Task" [ 1454.331202] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.346514] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.733s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.346818] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781198, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.378095] env[62525]: DEBUG nova.objects.instance [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'pci_requests' on Instance uuid 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1454.395846] env[62525]: DEBUG nova.network.neutron [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.567772] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781197, 'name': Rename_Task, 'duration_secs': 0.623899} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.568070] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1454.568290] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf31f756-649c-40ff-ad7d-dd9aa4b4227d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.577104] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1454.577104] env[62525]: value = "task-1781199" [ 1454.577104] env[62525]: _type = "Task" [ 1454.577104] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.583553] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781199, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.598319] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8e18b1-5401-460d-8381-c839e5df222f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.605595] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea585f4b-2f47-4264-82c6-7aac402205b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.638354] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097c1fbf-5fd1-4a01-bfa0-65239763b6f9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.650024] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f1e30f-2897-495a-8806-771d80549509 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.652312] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: cfae9bf8-012a-4286-b978-bba8a913bba2] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1454.667479] env[62525]: DEBUG nova.compute.provider_tree [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1454.685610] env[62525]: INFO nova.compute.manager [-] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Took 1.02 seconds to deallocate network for instance. 
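The "Acquiring lock ... / acquired ... waited / released ... held" messages throughout this trace (for "compute_resources", the "refresh_cache-<uuid>" locks, the per-instance terminate locks) are emitted by the inner wrapper in oslo.concurrency's lockutils (lockutils.py:402/407/421 in the paths above). A minimal sketch of the two usual forms, with hypothetical function names:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Only one thread in this process holds the "compute_resources" lock at a
    # time; the decorator's wrapper logs how long callers waited and held it.
    print('claiming resources for', instance_uuid)

def refresh_cache(instance_uuid):
    # Equivalent context-manager form for ad-hoc critical sections such as
    # the per-instance "refresh_cache-<uuid>" locks above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('refreshing network info cache for', instance_uuid)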
[ 1454.843601] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781198, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.851235] env[62525]: DEBUG nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1454.882702] env[62525]: DEBUG nova.objects.base [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Object Instance<8adc8b4b-1087-4a11-9ee8-d897f1aa83f3> lazy-loaded attributes: flavor,pci_requests {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1454.882702] env[62525]: DEBUG nova.network.neutron [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1454.900429] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Releasing lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.900666] env[62525]: DEBUG nova.compute.manager [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Inject network info {{(pid=62525) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1454.900872] env[62525]: DEBUG nova.compute.manager [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] network_info to inject: |[{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 
922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1454.906681] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Reconfiguring VM instance to set the machine id {{(pid=62525) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1454.907062] env[62525]: DEBUG oslo_concurrency.lockutils [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] Acquired lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.907212] env[62525]: DEBUG nova.network.neutron [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1454.911053] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d18c203e-4623-4cd9-84a1-d4b04ca6f3e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.929214] env[62525]: DEBUG oslo_vmware.api [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1454.929214] env[62525]: value = "task-1781200" [ 1454.929214] env[62525]: _type = "Task" [ 1454.929214] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.934790] env[62525]: DEBUG nova.policy [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1454.940096] env[62525]: DEBUG oslo_vmware.api [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781200, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.087758] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781199, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.097432] env[62525]: DEBUG nova.objects.instance [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lazy-loading 'flavor' on Instance uuid d2e7c558-02af-477c-b996-239ef14ed75b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1455.143808] env[62525]: DEBUG nova.network.neutron [-] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.159284] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 9dfb7d7f-6656-46fd-969e-c692db1ce507] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1455.170790] env[62525]: DEBUG nova.scheduler.client.report [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1455.195026] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.248465] env[62525]: DEBUG nova.network.neutron [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Successfully created port: a22c0ea8-32d8-47ab-bede-4917d1b3db27 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1455.344385] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781198, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.379152] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.439763] env[62525]: DEBUG oslo_vmware.api [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781200, 'name': ReconfigVM_Task, 'duration_secs': 0.218004} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.441043] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2070f8d2-0b1b-4448-bb81-e16e97536384 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Reconfigured VM instance to set the machine id {{(pid=62525) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1455.590975] env[62525]: DEBUG oslo_vmware.api [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781199, 'name': PowerOnVM_Task, 'duration_secs': 0.592405} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.591289] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1455.591525] env[62525]: INFO nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Took 8.72 seconds to spawn the instance on the hypervisor. 
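The inventory dict reported above for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 is what the scheduler report client (report.py:954) pushes to Placement. Placement derives the schedulable capacity for each resource class as (total - reserved) * allocation_ratio; a small sketch using the logged values:

# Values copied from the inventory logged above for this compute node.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity:g}")
# -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400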
[ 1455.591704] env[62525]: DEBUG nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1455.592523] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3526ee-6c28-4b35-95be-a1d18b59b26f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.607202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.644551] env[62525]: INFO nova.compute.manager [-] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Took 1.68 seconds to deallocate network for instance. [ 1455.666293] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 2b548b58-d5b9-4b6f-bef9-ea3b29f09f7b] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1455.677722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.678346] env[62525]: DEBUG nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1455.682452] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.445s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.683926] env[62525]: INFO nova.compute.claims [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1455.738582] env[62525]: DEBUG nova.network.neutron [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updated VIF entry in instance network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1455.739497] env[62525]: DEBUG nova.network.neutron [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.842091] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781198, 'name': CloneVM_Task} progress is 95%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.964881] env[62525]: DEBUG nova.compute.manager [req-3f161a00-f1ea-4dbd-8e94-130417960dd8 req-8dac1b2b-6f01-442f-9773-0b4615a1a916 service nova] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Received event network-vif-deleted-4eeddc3f-2e29-42bb-b0ed-b4bb41ca5c1b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1456.108800] env[62525]: INFO nova.compute.manager [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Took 55.85 seconds to build instance. 
[ 1456.152033] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.170795] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: e34ebddc-2192-4975-81d7-0f5c200f114e] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1456.188354] env[62525]: DEBUG nova.compute.utils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1456.192899] env[62525]: DEBUG nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1456.194026] env[62525]: DEBUG nova.network.neutron [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1456.242507] env[62525]: DEBUG oslo_concurrency.lockutils [req-813a7bb8-baa9-40ff-9167-534fe537b86f req-775889d0-71ec-4055-9a21-5121c56ffdb7 service nova] Releasing lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.243293] env[62525]: DEBUG oslo_concurrency.lockutils [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquired lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.261189] env[62525]: DEBUG nova.policy [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58798a1ccbb04814bcf703a9178f0595', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60c49608612a460d8cdaaca431e0e4b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1456.343828] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781198, 'name': CloneVM_Task, 'duration_secs': 1.965286} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.344146] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Created linked-clone VM from snapshot [ 1456.345070] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5191ce1c-2fef-4276-8643-5db2ca525088 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.353593] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Uploading image b78e633c-4064-4554-9930-0559a5b29327 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1456.385735] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1456.385735] env[62525]: value = "vm-369684" [ 1456.385735] env[62525]: _type = "VirtualMachine" [ 1456.385735] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1456.386055] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3becd73c-f481-4777-99c5-2b1e0ee0d934 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.400938] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease: (returnval){ [ 1456.400938] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b084a4-a5c1-5d82-aa38-492cc157ea44" [ 1456.400938] env[62525]: _type = "HttpNfcLease" [ 1456.400938] env[62525]: } obtained for exporting VM: (result){ [ 1456.400938] env[62525]: value = "vm-369684" [ 1456.400938] env[62525]: _type = "VirtualMachine" [ 1456.400938] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1456.401725] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the lease: (returnval){ [ 1456.401725] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b084a4-a5c1-5d82-aa38-492cc157ea44" [ 1456.401725] env[62525]: _type = "HttpNfcLease" [ 1456.401725] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1456.411597] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1456.411597] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b084a4-a5c1-5d82-aa38-492cc157ea44" [ 1456.411597] env[62525]: _type = "HttpNfcLease" [ 1456.411597] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1456.610973] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e1d9c18a-78a8-4152-9b01-0992385eb741 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.966s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.614834] env[62525]: DEBUG nova.network.neutron [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Successfully created port: c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1456.673878] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c70cf2f1-77a9-4eff-981f-9d72caa82c7b] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1456.691323] env[62525]: DEBUG nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1456.912292] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1456.912292] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b084a4-a5c1-5d82-aa38-492cc157ea44" [ 1456.912292] env[62525]: _type = "HttpNfcLease" [ 1456.912292] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1456.912866] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1456.912866] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b084a4-a5c1-5d82-aa38-492cc157ea44" [ 1456.912866] env[62525]: _type = "HttpNfcLease" [ 1456.912866] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1456.913334] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deaa6ec1-1178-4877-80ef-150cfc1547e9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.930258] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a5fd11-982f-8f07-9fb7-79d26bc5409a/disk-0.vmdk from lease info. 
{{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1456.930437] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a5fd11-982f-8f07-9fb7-79d26bc5409a/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1456.932460] env[62525]: DEBUG nova.network.neutron [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1457.044379] env[62525]: DEBUG nova.network.neutron [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Successfully updated port: a22c0ea8-32d8-47ab-bede-4917d1b3db27 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1457.093028] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f0ec5751-8c71-45b7-8879-3beaed645b1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.121730] env[62525]: DEBUG nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1457.177427] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: a1d1337f-3c41-4c1c-812b-aa10f2a680a8] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1457.237559] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef726d33-55c3-4369-a59c-85fe98e0526c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.247088] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f8b1ea-423a-40c5-9225-498c91e22c94 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.281306] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711537bf-5727-4a6c-96ab-02ae2cb8649a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.289752] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d5076c-cd36-455a-9703-9430f3abe348 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.306059] env[62525]: DEBUG nova.compute.provider_tree [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.419863] env[62525]: DEBUG nova.compute.manager [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Received event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1457.420252] env[62525]: DEBUG nova.compute.manager [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing instance network info cache due to event network-changed-3e9589f0-83a5-4985-ac44-61ded6abf83e. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1457.420332] env[62525]: DEBUG oslo_concurrency.lockutils [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] Acquiring lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.548744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.548887] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.549151] env[62525]: DEBUG nova.network.neutron [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1457.645108] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.684366] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 84fbb408-7810-4166-a53e-242d51f60322] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1457.703097] env[62525]: DEBUG nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1457.740785] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1457.741093] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1457.741360] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1457.741618] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1457.742163] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1457.742670] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1457.743434] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1457.743752] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1457.743941] env[62525]: DEBUG nova.virt.hardware [None 
req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1457.744251] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1457.744492] env[62525]: DEBUG nova.virt.hardware [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1457.745402] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b23931-45cf-4987-802c-416d41e9161f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.756559] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feac61bf-a73d-4ad8-be34-24e8093b02d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.813275] env[62525]: DEBUG nova.scheduler.client.report [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1457.836722] env[62525]: DEBUG nova.network.neutron [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.040642] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.040926] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.041265] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.041591] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.041835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.044112] env[62525]: INFO nova.compute.manager [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Terminating instance [ 1458.046456] env[62525]: DEBUG nova.compute.manager [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1458.046944] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.048217] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc72813-c7a8-49f8-b8c7-f94b4710c06f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.060151] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.060647] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00746182-e121-440c-9f72-bfac601e4265 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.068774] env[62525]: DEBUG oslo_vmware.api [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1458.068774] env[62525]: value = "task-1781202" [ 1458.068774] env[62525]: _type = "Task" [ 1458.068774] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.078687] env[62525]: DEBUG oslo_vmware.api [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781202, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.107665] env[62525]: WARNING nova.network.neutron [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] 58fc2de9-73a3-4f13-914c-ad34af02ccb5 already exists in list: networks containing: ['58fc2de9-73a3-4f13-914c-ad34af02ccb5']. ignoring it [ 1458.188462] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: e3255df2-2de0-4668-ad7b-a864ea680b44] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1458.319087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.319749] env[62525]: DEBUG nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1458.325157] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.787s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.326232] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.328561] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.581s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.331706] env[62525]: INFO nova.compute.claims [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1458.340736] env[62525]: DEBUG oslo_concurrency.lockutils [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Releasing lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.340736] env[62525]: DEBUG nova.compute.manager [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Inject network info {{(pid=62525) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1458.341248] env[62525]: DEBUG nova.compute.manager [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] network_info to inject: |[{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1458.345240] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Reconfiguring VM instance to set the machine id {{(pid=62525) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1458.345729] env[62525]: DEBUG oslo_concurrency.lockutils [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] Acquired lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.345999] env[62525]: DEBUG nova.network.neutron [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Refreshing network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1458.348820] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fc4ee28-d666-44e0-97df-661b3c4b430d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.371180] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "10f10329-9a7d-4e1b-8fb4-90350169e518" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.371685] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.372154] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "10f10329-9a7d-4e1b-8fb4-90350169e518-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.372656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.372656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.375296] env[62525]: INFO nova.scheduler.client.report [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Deleted allocations for instance aa639aa3-d21c-4923-bc39-56e648c566fb [ 1458.380858] env[62525]: INFO nova.compute.manager [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Terminating instance [ 1458.389172] env[62525]: DEBUG oslo_vmware.api [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1458.389172] env[62525]: value = "task-1781203" [ 1458.389172] env[62525]: _type = "Task" [ 1458.389172] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.390747] env[62525]: DEBUG nova.compute.manager [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received event network-vif-plugged-a22c0ea8-32d8-47ab-bede-4917d1b3db27 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1458.390893] env[62525]: DEBUG oslo_concurrency.lockutils [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] Acquiring lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.391181] env[62525]: DEBUG oslo_concurrency.lockutils [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.391416] env[62525]: DEBUG oslo_concurrency.lockutils [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.391599] env[62525]: DEBUG nova.compute.manager [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] No waiting events found dispatching 
network-vif-plugged-a22c0ea8-32d8-47ab-bede-4917d1b3db27 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1458.391769] env[62525]: WARNING nova.compute.manager [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received unexpected event network-vif-plugged-a22c0ea8-32d8-47ab-bede-4917d1b3db27 for instance with vm_state active and task_state None. [ 1458.391930] env[62525]: DEBUG nova.compute.manager [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received event network-changed-a22c0ea8-32d8-47ab-bede-4917d1b3db27 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1458.392095] env[62525]: DEBUG nova.compute.manager [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Refreshing instance network info cache due to event network-changed-a22c0ea8-32d8-47ab-bede-4917d1b3db27. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1458.392305] env[62525]: DEBUG oslo_concurrency.lockutils [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] Acquiring lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.393976] env[62525]: DEBUG nova.compute.manager [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1458.394189] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.395469] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93235bb8-264f-4935-9716-08a6b37186c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.410682] env[62525]: DEBUG oslo_vmware.api [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781203, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.413693] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.413693] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ce5673d-5429-4d97-8c58-b682d7fe12f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.423559] env[62525]: DEBUG oslo_vmware.api [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1458.423559] env[62525]: value = "task-1781204" [ 1458.423559] env[62525]: _type = "Task" [ 1458.423559] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.435676] env[62525]: DEBUG oslo_vmware.api [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.583900] env[62525]: DEBUG oslo_vmware.api [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781202, 'name': PowerOffVM_Task, 'duration_secs': 0.322027} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.584861] env[62525]: DEBUG nova.network.neutron [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Successfully updated port: c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1458.586277] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1458.589741] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1458.589741] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-913d1ee1-f1ba-4011-adb2-ffe10bc3ec92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.619889] env[62525]: DEBUG nova.network.neutron [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a22c0ea8-32d8-47ab-bede-4917d1b3db27", "address": "fa:16:3e:a4:f9:e6", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22c0ea8-32", "ovs_interfaceid": "a22c0ea8-32d8-47ab-bede-4917d1b3db27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.685486] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1458.685717] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1458.685920] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleting the datastore file [datastore1] 1fe967d9-351a-4b44-b7cb-d3c8395d9516 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1458.686356] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4ef4815-38dd-4ae3-9ecf-89d2c7c514c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.692398] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 1277dac8-3a23-4de8-93c7-c967b0eaf6ba] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1458.695818] env[62525]: DEBUG oslo_vmware.api [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1458.695818] env[62525]: value = "task-1781206" [ 1458.695818] env[62525]: _type = "Task" [ 1458.695818] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.707570] env[62525]: DEBUG oslo_vmware.api [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781206, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.716301] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "d2e7c558-02af-477c-b996-239ef14ed75b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.716666] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "d2e7c558-02af-477c-b996-239ef14ed75b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.717094] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "d2e7c558-02af-477c-b996-239ef14ed75b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.717396] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "d2e7c558-02af-477c-b996-239ef14ed75b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.717791] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "d2e7c558-02af-477c-b996-239ef14ed75b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.720946] env[62525]: INFO nova.compute.manager [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Terminating instance [ 1458.722030] env[62525]: DEBUG nova.compute.manager [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1458.722208] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.723305] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3f36be-e8b1-48ba-8c32-eaebb8c63f0b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.732585] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.732885] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25102c82-029e-42d7-b5f5-bf341eb9cf47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.741998] env[62525]: DEBUG oslo_vmware.api [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1458.741998] env[62525]: value = "task-1781207" [ 1458.741998] env[62525]: _type = "Task" [ 1458.741998] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.752229] env[62525]: DEBUG oslo_vmware.api [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.826176] env[62525]: DEBUG nova.compute.utils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1458.827818] env[62525]: DEBUG nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1458.828111] env[62525]: DEBUG nova.network.neutron [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1458.885967] env[62525]: DEBUG nova.policy [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b2a3e9006c44ebabc5a73be540b9045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4685480cae574a5daac6a1f077a8c319', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1458.892708] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a5ee228-217c-42e8-9957-ff4e38637877 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "aa639aa3-d21c-4923-bc39-56e648c566fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.824s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.906012] env[62525]: DEBUG oslo_vmware.api [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781203, 'name': ReconfigVM_Task, 'duration_secs': 0.175793} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.906459] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-05739f3c-9c7c-4af3-b279-800cd9589f31 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Reconfigured VM instance to set the machine id {{(pid=62525) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1458.938136] env[62525]: DEBUG oslo_vmware.api [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781204, 'name': PowerOffVM_Task, 'duration_secs': 0.234659} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.941041] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1458.941234] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1458.941538] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f43fee05-13f6-4d2e-bd1a-a5ca8cc67d76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.091379] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "refresh_cache-7c8474fd-2ca5-4ecc-b2e6-4248baafd639" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.091379] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquired lock "refresh_cache-7c8474fd-2ca5-4ecc-b2e6-4248baafd639" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.091379] env[62525]: DEBUG nova.network.neutron [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1459.122940] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.123384] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.123546] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.123938] env[62525]: DEBUG oslo_concurrency.lockutils [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] Acquired lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" 
{{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.124118] env[62525]: DEBUG nova.network.neutron [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Refreshing network info cache for port a22c0ea8-32d8-47ab-bede-4917d1b3db27 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1459.125661] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5725c4-b07a-45c5-8142-cb8e50d03795 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.145133] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1459.145391] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1459.145551] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1459.145728] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1459.145874] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1459.146047] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1459.146277] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1459.146438] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1459.146604] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1459.146768] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1459.147025] env[62525]: DEBUG nova.virt.hardware [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1459.153739] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Reconfiguring VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1459.157067] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c361e13-1088-4d3c-91a9-09731e38af88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.170951] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1459.171174] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1459.171356] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleting the datastore file [datastore1] 10f10329-9a7d-4e1b-8fb4-90350169e518 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1459.175025] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ab85915-465a-4083-bf72-8318b238556b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.177522] 
env[62525]: DEBUG oslo_vmware.api [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1459.177522] env[62525]: value = "task-1781209" [ 1459.177522] env[62525]: _type = "Task" [ 1459.177522] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.181953] env[62525]: DEBUG oslo_vmware.api [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for the task: (returnval){ [ 1459.181953] env[62525]: value = "task-1781210" [ 1459.181953] env[62525]: _type = "Task" [ 1459.181953] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.188733] env[62525]: DEBUG oslo_vmware.api [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781209, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.194143] env[62525]: DEBUG oslo_vmware.api [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.196691] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: d38bbd59-b40c-4965-b823-caefc93e2568] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1459.208728] env[62525]: DEBUG oslo_vmware.api [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230989} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.208922] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.209605] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.209605] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.209605] env[62525]: INFO nova.compute.manager [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1459.209781] env[62525]: DEBUG oslo.service.loopingcall [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.209959] env[62525]: DEBUG nova.compute.manager [-] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1459.210076] env[62525]: DEBUG nova.network.neutron [-] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.253239] env[62525]: DEBUG oslo_vmware.api [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781207, 'name': PowerOffVM_Task, 'duration_secs': 0.218311} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.253727] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1459.254152] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1459.254606] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8bb625a-07cd-429c-8d8a-4dd996090d86 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.312994] env[62525]: DEBUG nova.network.neutron [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updated VIF entry in instance network info cache for port 3e9589f0-83a5-4985-ac44-61ded6abf83e. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1459.312994] env[62525]: DEBUG nova.network.neutron [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [{"id": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "address": "fa:16:3e:ae:dd:22", "network": {"id": "d207d57d-e0c4-4bfd-8e8d-81f6cc180f65", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-257946311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18483247e96e4263b0d32088d19debf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9589f0-83", "ovs_interfaceid": "3e9589f0-83a5-4985-ac44-61ded6abf83e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.333851] env[62525]: DEBUG nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1459.356632] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1459.356632] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1459.356632] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Deleting the datastore file [datastore1] d2e7c558-02af-477c-b996-239ef14ed75b {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1459.357713] env[62525]: DEBUG nova.network.neutron [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Successfully created port: b3bb2d5b-835d-4462-8234-ea61148680b4 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1459.360022] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a68eb7d-9e26-4973-b13e-0536b49c0412 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.369223] env[62525]: DEBUG oslo_vmware.api [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for the task: (returnval){ [ 1459.369223] env[62525]: value = "task-1781212" [ 1459.369223] env[62525]: _type = "Task" [ 1459.369223] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.385575] env[62525]: DEBUG oslo_vmware.api [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.635460] env[62525]: DEBUG nova.network.neutron [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1459.695251] env[62525]: DEBUG oslo_vmware.api [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.698531] env[62525]: DEBUG oslo_vmware.api [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Task: {'id': task-1781210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212995} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.701182] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.701360] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.701968] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.701968] env[62525]: INFO nova.compute.manager [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1459.701968] env[62525]: DEBUG oslo.service.loopingcall [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.704555] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8b41bff7-137f-489c-bb88-7487eb8e97cb] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1459.707204] env[62525]: DEBUG nova.compute.manager [-] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1459.707324] env[62525]: DEBUG nova.network.neutron [-] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.720583] env[62525]: DEBUG nova.compute.manager [req-a99e368c-3da0-4986-b093-f814d67069f9 req-518efc20-b4c4-4e51-bbf2-f7dfdea6f736 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Received event network-vif-deleted-e38028c4-293a-4544-8825-58a9c035c2f0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.720803] env[62525]: INFO nova.compute.manager [req-a99e368c-3da0-4986-b093-f814d67069f9 req-518efc20-b4c4-4e51-bbf2-f7dfdea6f736 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Neutron deleted interface e38028c4-293a-4544-8825-58a9c035c2f0; detaching it from the instance and deleting it from the info cache [ 1459.720991] env[62525]: DEBUG nova.network.neutron [req-a99e368c-3da0-4986-b093-f814d67069f9 req-518efc20-b4c4-4e51-bbf2-f7dfdea6f736 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.814799] env[62525]: DEBUG oslo_concurrency.lockutils [req-efb7fc00-34b9-496b-a918-f545d13e3117 req-c92c6572-c6e6-488a-bfa9-57e5d69e53ac service nova] Releasing lock "refresh_cache-d2e7c558-02af-477c-b996-239ef14ed75b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.842433] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2665223b-80c5-4eb4-8cea-02f426c0fa96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.852596] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4580a8-09e6-40ca-805e-aa09336cf25e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.894600] env[62525]: DEBUG nova.network.neutron [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Updating instance_info_cache with network_info: [{"id": "c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3", "address": "fa:16:3e:fc:11:99", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": 
{"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e1d38a-ff", "ovs_interfaceid": "c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.899260] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30e1501-34bc-4452-ba6d-92abefe91b6d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.912154] env[62525]: DEBUG oslo_vmware.api [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Task: {'id': task-1781212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179675} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.912154] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.912370] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.912519] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.912684] env[62525]: INFO nova.compute.manager [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1459.912906] env[62525]: DEBUG oslo.service.loopingcall [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.914185] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c54ff9-b1d9-49c0-b0e1-d7701803dfa2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.918955] env[62525]: DEBUG nova.compute.manager [-] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1459.919146] env[62525]: DEBUG nova.network.neutron [-] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.932251] env[62525]: DEBUG nova.compute.provider_tree [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.013583] env[62525]: DEBUG nova.network.neutron [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updated VIF entry in instance network info cache for port a22c0ea8-32d8-47ab-bede-4917d1b3db27. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1460.014185] env[62525]: DEBUG nova.network.neutron [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a22c0ea8-32d8-47ab-bede-4917d1b3db27", "address": "fa:16:3e:a4:f9:e6", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22c0ea8-32", "ovs_interfaceid": "a22c0ea8-32d8-47ab-bede-4917d1b3db27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.033594] env[62525]: DEBUG nova.network.neutron [-] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.192373] env[62525]: DEBUG oslo_vmware.api [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781209, 'name': ReconfigVM_Task, 'duration_secs': 0.921395} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.193058] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.193236] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Reconfigured VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1460.212785] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 35a2e221-e1c5-49d9-af93-5e5f28c62b8f] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1460.224071] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-955f003d-bf21-4208-931d-d3acb4c6d190 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.235348] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626f3036-f7a5-4f9f-989e-9d35b5782ab7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.281559] env[62525]: DEBUG nova.compute.manager [req-a99e368c-3da0-4986-b093-f814d67069f9 req-518efc20-b4c4-4e51-bbf2-f7dfdea6f736 service nova] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Detach interface failed, port_id=e38028c4-293a-4544-8825-58a9c035c2f0, reason: Instance 1fe967d9-351a-4b44-b7cb-d3c8395d9516 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1460.343716] env[62525]: DEBUG nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1460.372148] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1460.372413] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1460.372567] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1460.372835] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1460.372942] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1460.373099] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1460.373312] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1460.373470] 
env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1460.373631] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1460.373790] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1460.373958] env[62525]: DEBUG nova.virt.hardware [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1460.374836] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913df254-f688-40cb-acbf-1375be719a4b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.384079] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7675f88-d459-4097-8c16-0fc6d2fa4dcf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.403074] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Releasing lock "refresh_cache-7c8474fd-2ca5-4ecc-b2e6-4248baafd639" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.403414] env[62525]: DEBUG nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Instance network_info: |[{"id": "c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3", "address": "fa:16:3e:fc:11:99", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e1d38a-ff", "ovs_interfaceid": "c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1460.404187] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:11:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1460.411473] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Creating folder: Project (60c49608612a460d8cdaaca431e0e4b0). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1460.411778] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89e3c326-bcd3-4406-abe8-aa4bc4b2b283 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.425106] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Created folder: Project (60c49608612a460d8cdaaca431e0e4b0) in parent group-v369553. [ 1460.425329] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Creating folder: Instances. Parent ref: group-v369685. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1460.425582] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3afbd22-dc62-4301-a425-5e104ca7099a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.437441] env[62525]: DEBUG nova.scheduler.client.report [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1460.441041] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Created folder: Instances in parent group-v369685. 
[ 1460.441220] env[62525]: DEBUG oslo.service.loopingcall [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.441560] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1460.441769] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7296ba43-b6af-487c-8dd0-d0546fe89367 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.465022] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1460.465022] env[62525]: value = "task-1781215" [ 1460.465022] env[62525]: _type = "Task" [ 1460.465022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.477131] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781215, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.517010] env[62525]: DEBUG oslo_concurrency.lockutils [req-28be5a45-1700-480e-9651-31a8ff7bfe15 req-2ce388f8-542d-47b5-b887-b54413d7f35b service nova] Releasing lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.536719] env[62525]: INFO nova.compute.manager [-] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Took 1.33 seconds to deallocate network for instance. 
[ 1460.603145] env[62525]: DEBUG nova.network.neutron [-] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.697683] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2537bd3b-0f15-4694-baca-0a2529c461d7 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.943s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.718440] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: b6bdc187-a266-4f7d-a9e4-85cb100cf4bf] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1460.743548] env[62525]: DEBUG nova.compute.manager [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Received event network-vif-plugged-c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1460.743548] env[62525]: DEBUG oslo_concurrency.lockutils [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] Acquiring lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.743548] env[62525]: DEBUG oslo_concurrency.lockutils [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.743548] env[62525]: DEBUG oslo_concurrency.lockutils [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.743548] env[62525]: DEBUG nova.compute.manager [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] No waiting events found dispatching network-vif-plugged-c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1460.745949] env[62525]: WARNING nova.compute.manager [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Received unexpected event network-vif-plugged-c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 for instance with vm_state building and task_state spawning. 
[ 1460.746193] env[62525]: DEBUG nova.compute.manager [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Received event network-changed-c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1460.746362] env[62525]: DEBUG nova.compute.manager [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Refreshing instance network info cache due to event network-changed-c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1460.746564] env[62525]: DEBUG oslo_concurrency.lockutils [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] Acquiring lock "refresh_cache-7c8474fd-2ca5-4ecc-b2e6-4248baafd639" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.746705] env[62525]: DEBUG oslo_concurrency.lockutils [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] Acquired lock "refresh_cache-7c8474fd-2ca5-4ecc-b2e6-4248baafd639" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.746979] env[62525]: DEBUG nova.network.neutron [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Refreshing network info cache for port c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.936220] env[62525]: DEBUG nova.network.neutron [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Successfully updated port: b3bb2d5b-835d-4462-8234-ea61148680b4 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1460.943039] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.944029] env[62525]: DEBUG nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1460.946855] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.124s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.947228] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.950062] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.088s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.952224] env[62525]: INFO nova.compute.claims [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1460.976283] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781215, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.984553] env[62525]: INFO nova.scheduler.client.report [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Deleted allocations for instance 98334a1b-1a73-408f-93a4-6dc72764ebfc [ 1461.002627] env[62525]: DEBUG nova.network.neutron [-] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.043892] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.106014] env[62525]: INFO nova.compute.manager [-] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Took 1.40 seconds to deallocate network for instance. 
[ 1461.223889] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f4cf1732-6b6a-47be-acf4-b127bc4b9baf] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1461.438690] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.438825] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.438978] env[62525]: DEBUG nova.network.neutron [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1461.456669] env[62525]: DEBUG nova.compute.utils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1461.460656] env[62525]: DEBUG nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1461.460841] env[62525]: DEBUG nova.network.neutron [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1461.476688] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781215, 'name': CreateVM_Task, 'duration_secs': 0.725404} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.477555] env[62525]: DEBUG nova.network.neutron [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Updated VIF entry in instance network info cache for port c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.477877] env[62525]: DEBUG nova.network.neutron [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Updating instance_info_cache with network_info: [{"id": "c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3", "address": "fa:16:3e:fc:11:99", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8e1d38a-ff", "ovs_interfaceid": "c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.478925] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1461.480577] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.480795] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.481092] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1461.481593] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1151a9d2-9c22-46c2-af8f-2e16b4be9915 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.486892] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1461.486892] env[62525]: value = 
"session[52912505-83d3-c6c8-239b-e663f6298abd]527ef392-5781-1df0-c01f-6cc4109b2158" [ 1461.486892] env[62525]: _type = "Task" [ 1461.486892] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.497223] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ce40252-4061-4490-9ad4-4e5b005240c1 tempest-ListServerFiltersTestJSON-329634414 tempest-ListServerFiltersTestJSON-329634414-project-member] Lock "98334a1b-1a73-408f-93a4-6dc72764ebfc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.229s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.501989] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ef392-5781-1df0-c01f-6cc4109b2158, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.506734] env[62525]: INFO nova.compute.manager [-] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Took 1.59 seconds to deallocate network for instance. [ 1461.548376] env[62525]: DEBUG nova.policy [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec9ca60ca555404da6eeba93b5334182', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af6b089a5dcf4137bb880d77c010f975', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1461.612924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.728367] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 30fbab3d-8141-4d7e-987f-e4f4fc4a1808] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1461.865986] env[62525]: DEBUG nova.compute.manager [req-b6c2c691-a501-4e4a-84b1-4d7a252ea5eb req-e1e20af9-27eb-487a-8e6c-9224694b8009 service nova] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Received event network-vif-deleted-3e9589f0-83a5-4985-ac44-61ded6abf83e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1461.961921] env[62525]: DEBUG nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1461.977644] env[62525]: DEBUG nova.network.neutron [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1461.979844] env[62525]: DEBUG oslo_concurrency.lockutils [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] Releasing lock "refresh_cache-7c8474fd-2ca5-4ecc-b2e6-4248baafd639" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.979957] env[62525]: DEBUG nova.compute.manager [req-c238bd52-14d4-45dd-8ed7-79a4a68af795 req-38768b2f-c233-4f00-a848-6543849f6b93 service nova] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Received event network-vif-deleted-3325b087-dcb4-4eae-9fb2-f584a769e45a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.006424] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ef392-5781-1df0-c01f-6cc4109b2158, 'name': SearchDatastore_Task, 'duration_secs': 0.015532} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.011613] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.011879] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.012118] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.012319] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.012504] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.013752] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.014527] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-501096da-5241-48f7-9f8a-044361e75a04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.028142] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.028365] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1462.030504] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e055fb5-623f-4963-8793-dca727aa4f39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.040841] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1462.040841] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f10ab2-9af5-76b5-3420-c918568ff04c" [ 1462.040841] env[62525]: _type = "Task" [ 1462.040841] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.051320] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f10ab2-9af5-76b5-3420-c918568ff04c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.088988] env[62525]: DEBUG nova.network.neutron [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Successfully created port: 62c6a0e2-0091-4863-a677-cbdf737769b7 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1462.167880] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-a22c0ea8-32d8-47ab-bede-4917d1b3db27" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.168159] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-a22c0ea8-32d8-47ab-bede-4917d1b3db27" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.215914] env[62525]: DEBUG nova.network.neutron [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Updating instance_info_cache with network_info: [{"id": "b3bb2d5b-835d-4462-8234-ea61148680b4", "address": "fa:16:3e:84:79:d0", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3bb2d5b-83", "ovs_interfaceid": "b3bb2d5b-835d-4462-8234-ea61148680b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.231640] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 7f8392fa-1c11-4180-bda9-057b5cfa058c] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1462.459681] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c780946-1f8f-4bcc-b3e3-ff16102b06df {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.469335] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a097b7de-1a27-45c0-bba5-6fb1bc7fe79a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.509013] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7fefc8-57cc-433e-95ba-14a93cc02613 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.519346] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810ce174-2723-42cc-b74e-69d4415fdba3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.537084] env[62525]: DEBUG nova.compute.provider_tree [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.554109] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f10ab2-9af5-76b5-3420-c918568ff04c, 'name': SearchDatastore_Task, 'duration_secs': 0.012157} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.554925] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d4bc9fe-74b5-4f9a-9dad-5e9b8fbaa7cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.562532] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1462.562532] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52165aa4-b05f-2cdb-e3a1-91c2b41ef82c" [ 1462.562532] env[62525]: _type = "Task" [ 1462.562532] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.572309] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52165aa4-b05f-2cdb-e3a1-91c2b41ef82c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.631230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "deef59c8-f710-434d-bddc-f63bb3d518b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.631546] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.631759] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "deef59c8-f710-434d-bddc-f63bb3d518b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.631946] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.632129] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.634246] env[62525]: INFO nova.compute.manager [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Terminating instance [ 1462.636014] env[62525]: DEBUG nova.compute.manager [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1462.636233] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1462.637103] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fb0d55-5608-4f23-849d-0c6cf35b2a67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.645928] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1462.646197] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0de2a69-fe02-4114-9336-6974aab15e67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.654800] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1462.654800] env[62525]: value = "task-1781216" [ 1462.654800] env[62525]: _type = "Task" [ 1462.654800] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.664218] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1781216, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.670937] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.671131] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.671939] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b06d0d8-2e3c-451b-b0bd-43d3c176f065 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.691130] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb720cf-c617-4eea-8f66-60b353cca7e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.718268] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Reconfiguring VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1462.718838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.719171] env[62525]: DEBUG nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Instance network_info: |[{"id": "b3bb2d5b-835d-4462-8234-ea61148680b4", "address": "fa:16:3e:84:79:d0", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3bb2d5b-83", "ovs_interfaceid": "b3bb2d5b-835d-4462-8234-ea61148680b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1462.719496] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd3c2fc4-69b8-421d-b14b-9be661a481d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.734201] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:79:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3bb2d5b-835d-4462-8234-ea61148680b4', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1462.742549] env[62525]: DEBUG oslo.service.loopingcall [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1462.742975] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 82ea280a-4e1b-4fac-a634-7f79ce731564] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1462.744652] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1462.746475] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca9d7b11-2818-495c-b1c1-636e5acc177c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.764631] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1462.764631] env[62525]: value = "task-1781217" [ 1462.764631] env[62525]: _type = "Task" [ 1462.764631] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.771672] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1462.771672] env[62525]: value = "task-1781218" [ 1462.771672] env[62525]: _type = "Task" [ 1462.771672] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.775367] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.781801] env[62525]: DEBUG nova.compute.manager [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Received event network-vif-plugged-b3bb2d5b-835d-4462-8234-ea61148680b4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.782187] env[62525]: DEBUG oslo_concurrency.lockutils [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] Acquiring lock "61f05e69-5e90-47da-9f47-3651b580a23c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.782456] env[62525]: DEBUG oslo_concurrency.lockutils [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] Lock "61f05e69-5e90-47da-9f47-3651b580a23c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.784860] env[62525]: DEBUG oslo_concurrency.lockutils [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] Lock "61f05e69-5e90-47da-9f47-3651b580a23c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.784860] env[62525]: DEBUG nova.compute.manager [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] No waiting events found dispatching network-vif-plugged-b3bb2d5b-835d-4462-8234-ea61148680b4 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1462.784860] env[62525]: WARNING nova.compute.manager [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Received unexpected event network-vif-plugged-b3bb2d5b-835d-4462-8234-ea61148680b4 for instance with vm_state building and task_state spawning. [ 1462.784860] env[62525]: DEBUG nova.compute.manager [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Received event network-changed-b3bb2d5b-835d-4462-8234-ea61148680b4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.784860] env[62525]: DEBUG nova.compute.manager [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Refreshing instance network info cache due to event network-changed-b3bb2d5b-835d-4462-8234-ea61148680b4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1462.784860] env[62525]: DEBUG oslo_concurrency.lockutils [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] Acquiring lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.784860] env[62525]: DEBUG oslo_concurrency.lockutils [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] Acquired lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.784860] env[62525]: DEBUG nova.network.neutron [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Refreshing network info cache for port b3bb2d5b-835d-4462-8234-ea61148680b4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1462.793684] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781218, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.978459] env[62525]: DEBUG nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1463.016642] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1463.016940] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1463.017185] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1463.017429] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 
tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1463.017620] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1463.017802] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1463.018060] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1463.018261] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1463.018485] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1463.018698] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1463.018939] env[62525]: DEBUG nova.virt.hardware [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1463.019891] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce138e6c-64f6-4399-bbba-bdfd3ada630c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.029236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd459def-c992-4bab-864f-91ed481a7a6f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.045713] env[62525]: DEBUG nova.scheduler.client.report [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1463.075824] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52165aa4-b05f-2cdb-e3a1-91c2b41ef82c, 'name': SearchDatastore_Task, 'duration_secs': 0.011975} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.076238] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.076618] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7c8474fd-2ca5-4ecc-b2e6-4248baafd639/7c8474fd-2ca5-4ecc-b2e6-4248baafd639.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.076946] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef105fb3-8803-4fa7-8ee4-12e5797e20c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.084672] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1463.084672] env[62525]: value = "task-1781219" [ 1463.084672] env[62525]: _type = "Task" [ 1463.084672] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.097803] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.165062] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1781216, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.263887] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 0e8254af-403d-4f5d-ac58-f3b4efc0c3d6] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1463.277846] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.290881] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781218, 'name': CreateVM_Task, 'duration_secs': 0.417766} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.291064] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1463.292174] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.292402] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.292764] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1463.293721] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec1115d7-4e5c-40b1-bb85-bcbc09e49c6b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.300694] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1463.300694] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529a2434-7474-0933-5b04-80794c5063ea" [ 1463.300694] env[62525]: _type = "Task" [ 1463.300694] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.312493] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529a2434-7474-0933-5b04-80794c5063ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.552426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.602s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.553016] env[62525]: DEBUG nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1463.556183] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.041s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.556447] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.559745] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.642s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.560983] env[62525]: INFO nova.compute.claims [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1463.604335] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781219, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.608584] env[62525]: INFO nova.scheduler.client.report [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Deleted allocations for instance cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9 [ 1463.667409] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1781216, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.711261] env[62525]: DEBUG nova.network.neutron [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Updated VIF entry in instance network info cache for port b3bb2d5b-835d-4462-8234-ea61148680b4. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1463.711386] env[62525]: DEBUG nova.network.neutron [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Updating instance_info_cache with network_info: [{"id": "b3bb2d5b-835d-4462-8234-ea61148680b4", "address": "fa:16:3e:84:79:d0", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3bb2d5b-83", "ovs_interfaceid": "b3bb2d5b-835d-4462-8234-ea61148680b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.771439] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8c6e22d6-353f-4be5-8400-7fe38a9bee25] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1463.782345] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.818553] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529a2434-7474-0933-5b04-80794c5063ea, 'name': SearchDatastore_Task, 'duration_secs': 0.081965} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.818870] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.819173] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1463.819389] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.819758] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.819758] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1463.819965] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c744b094-1d0a-47dd-9ab3-98c67252b554 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.832031] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1463.832031] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1463.832862] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae99a814-d7c4-4e23-9e08-14f7c7ef487d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.840242] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1463.840242] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52de3d26-f57f-9a23-4a7e-fb5e8c2997d5" [ 1463.840242] env[62525]: _type = "Task" [ 1463.840242] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.850410] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52de3d26-f57f-9a23-4a7e-fb5e8c2997d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.903430] env[62525]: DEBUG nova.compute.manager [req-6d9b9da6-0879-4dba-9893-6f8e9b8ff0e9 req-8d9d8f52-747e-4d08-a8d9-485f8c5b2645 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Received event network-vif-plugged-62c6a0e2-0091-4863-a677-cbdf737769b7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1463.903658] env[62525]: DEBUG oslo_concurrency.lockutils [req-6d9b9da6-0879-4dba-9893-6f8e9b8ff0e9 req-8d9d8f52-747e-4d08-a8d9-485f8c5b2645 service nova] Acquiring lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.903866] env[62525]: DEBUG oslo_concurrency.lockutils [req-6d9b9da6-0879-4dba-9893-6f8e9b8ff0e9 req-8d9d8f52-747e-4d08-a8d9-485f8c5b2645 service nova] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.904124] env[62525]: DEBUG oslo_concurrency.lockutils [req-6d9b9da6-0879-4dba-9893-6f8e9b8ff0e9 req-8d9d8f52-747e-4d08-a8d9-485f8c5b2645 service nova] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.904380] env[62525]: DEBUG nova.compute.manager [req-6d9b9da6-0879-4dba-9893-6f8e9b8ff0e9 req-8d9d8f52-747e-4d08-a8d9-485f8c5b2645 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] No waiting events found dispatching network-vif-plugged-62c6a0e2-0091-4863-a677-cbdf737769b7 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1463.904528] env[62525]: WARNING nova.compute.manager [req-6d9b9da6-0879-4dba-9893-6f8e9b8ff0e9 req-8d9d8f52-747e-4d08-a8d9-485f8c5b2645 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Received unexpected event 
network-vif-plugged-62c6a0e2-0091-4863-a677-cbdf737769b7 for instance with vm_state building and task_state spawning. [ 1464.068726] env[62525]: DEBUG nova.network.neutron [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Successfully updated port: 62c6a0e2-0091-4863-a677-cbdf737769b7 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1464.070499] env[62525]: DEBUG nova.compute.utils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1464.073770] env[62525]: DEBUG nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1464.073942] env[62525]: DEBUG nova.network.neutron [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1464.099574] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586547} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.099784] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7c8474fd-2ca5-4ecc-b2e6-4248baafd639/7c8474fd-2ca5-4ecc-b2e6-4248baafd639.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1464.099983] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1464.100292] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7cf7219a-1dcc-400c-bcae-31c71af80169 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.108576] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1464.108576] env[62525]: value = "task-1781220" [ 1464.108576] env[62525]: _type = "Task" [ 1464.108576] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.121381] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781220, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.121874] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22b4ecd0-8bc4-470e-8c52-77ac13997c35 tempest-ServersTestBootFromVolume-184707848 tempest-ServersTestBootFromVolume-184707848-project-member] Lock "cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.317s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.168698] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1781216, 'name': PowerOffVM_Task, 'duration_secs': 1.28819} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.169015] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1464.169257] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1464.169649] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aeeec81b-1e4c-4abf-b0fd-40c2175cf51d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.201730] env[62525]: DEBUG nova.policy [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec9ca60ca555404da6eeba93b5334182', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af6b089a5dcf4137bb880d77c010f975', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1464.214662] env[62525]: DEBUG oslo_concurrency.lockutils [req-58e7fdbc-19df-49a7-962d-417f5edbeb3a req-74893861-13fe-4795-804d-652146e895df service nova] Releasing lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.264114] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 
tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1464.264361] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1464.264509] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Deleting the datastore file [datastore1] deef59c8-f710-434d-bddc-f63bb3d518b1 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1464.264778] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0da511c-6c91-46b8-9594-3d5067b77f0c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.271800] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for the task: (returnval){ [ 1464.271800] env[62525]: value = "task-1781222" [ 1464.271800] env[62525]: _type = "Task" [ 1464.271800] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.278645] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 9a7bfafe-8598-4c6f-9714-0567fcbb8ea6] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1464.280812] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.291246] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1781222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.352383] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52de3d26-f57f-9a23-4a7e-fb5e8c2997d5, 'name': SearchDatastore_Task, 'duration_secs': 0.014413} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.353261] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef704ef7-3ee4-43ac-a408-9420f434ea4b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.364437] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1464.364437] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526d9281-9d0d-3ec4-98a7-95268a4e1a61" [ 1464.364437] env[62525]: _type = "Task" [ 1464.364437] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.376746] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526d9281-9d0d-3ec4-98a7-95268a4e1a61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.561501] env[62525]: DEBUG nova.network.neutron [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Successfully created port: b3e42a52-e289-4575-bfdd-06cf88ba69ce {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1464.574890] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "refresh_cache-5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.575066] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired lock "refresh_cache-5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.575205] env[62525]: DEBUG nova.network.neutron [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1464.576570] env[62525]: DEBUG nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1464.630457] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781220, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075096} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.631812] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1464.633326] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d004227a-820b-4fa6-a3ee-62a8e55d5b29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.668185] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 7c8474fd-2ca5-4ecc-b2e6-4248baafd639/7c8474fd-2ca5-4ecc-b2e6-4248baafd639.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1464.672028] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5a33372-5d11-4323-9c2d-180b799131f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.696593] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1464.696593] env[62525]: value = "task-1781223" [ 1464.696593] env[62525]: _type = "Task" [ 1464.696593] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.707509] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781223, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.782757] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 5b6b4ed4-67c3-4f0c-ac2a-d5c8981a9e07] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1464.794179] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.801293] env[62525]: DEBUG oslo_vmware.api [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Task: {'id': task-1781222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196308} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.802492] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1464.802811] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1464.803426] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1464.803511] env[62525]: INFO nova.compute.manager [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Took 2.17 seconds to destroy the instance on the hypervisor. [ 1464.803916] env[62525]: DEBUG oslo.service.loopingcall [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1464.804503] env[62525]: DEBUG nova.compute.manager [-] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1464.804503] env[62525]: DEBUG nova.network.neutron [-] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1464.876518] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526d9281-9d0d-3ec4-98a7-95268a4e1a61, 'name': SearchDatastore_Task, 'duration_secs': 0.013484} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.876518] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.876735] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 61f05e69-5e90-47da-9f47-3651b580a23c/61f05e69-5e90-47da-9f47-3651b580a23c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1464.876995] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a385450d-15de-48ec-a4ac-97406bb22195 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.890331] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1464.890331] env[62525]: value = "task-1781224" [ 1464.890331] env[62525]: _type = "Task" [ 1464.890331] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.905460] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781224, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.983081] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "1f3792c0-9f86-4d76-a1a6-28d492869046" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.983326] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.983549] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "1f3792c0-9f86-4d76-a1a6-28d492869046-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.983857] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.983993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.986293] env[62525]: INFO nova.compute.manager [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Terminating instance [ 1464.988717] env[62525]: DEBUG nova.compute.manager [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1464.988912] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1464.990101] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7885315-7179-4a5d-9fdb-e8f77b9d9649 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.999663] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1465.002962] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5016546-f01e-49c2-af3f-0065459475ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.011861] env[62525]: DEBUG oslo_vmware.api [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1465.011861] env[62525]: value = "task-1781225" [ 1465.011861] env[62525]: _type = "Task" [ 1465.011861] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.022121] env[62525]: DEBUG oslo_vmware.api [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.148251] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4ac9f4-c683-4a5a-94a4-b1026fb737ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.159682] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4a14c7-c19b-4cb6-a376-9dff52376edb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.199859] env[62525]: DEBUG nova.network.neutron [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1465.206836] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c64f865-d953-4aff-9c95-ef6434993b90 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.218767] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781223, 'name': ReconfigVM_Task, 'duration_secs': 0.322384} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.221818] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 7c8474fd-2ca5-4ecc-b2e6-4248baafd639/7c8474fd-2ca5-4ecc-b2e6-4248baafd639.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1465.224515] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8afe88b8-0ec7-4a0b-a8cf-f8eb3c203de6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.225325] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef664c75-025f-4c0c-82a0-a66226bfabc0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.245919] env[62525]: DEBUG nova.compute.provider_tree [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.249697] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1465.249697] env[62525]: value = "task-1781226" [ 1465.249697] env[62525]: _type = "Task" [ 1465.249697] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.261492] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781226, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.280537] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.297079] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 5bffec39-0b09-49a0-a862-560720db45cd] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1465.408836] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781224, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.528644] env[62525]: DEBUG oslo_vmware.api [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781225, 'name': PowerOffVM_Task, 'duration_secs': 0.470763} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.528972] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1465.530804] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1465.530804] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68856579-7a08-401e-97ca-f6729301d2cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.602116] env[62525]: DEBUG nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1465.608097] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1465.608423] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1465.608660] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Deleting the datastore file [datastore1] 1f3792c0-9f86-4d76-a1a6-28d492869046 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1465.608996] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdca0cf8-075c-4bc3-b45f-267937e62b71 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.618983] env[62525]: DEBUG oslo_vmware.api [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for the task: (returnval){ [ 1465.618983] env[62525]: value = "task-1781228" [ 1465.618983] env[62525]: _type = "Task" [ 1465.618983] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.634892] env[62525]: DEBUG oslo_vmware.api [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781228, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.637554] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1465.638581] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1465.638581] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1465.638581] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1465.638581] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1465.638581] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1465.638941] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1465.638941] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1465.639049] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1465.639187] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1465.639380] env[62525]: DEBUG nova.virt.hardware [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1465.640343] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c6f39b-259c-45bc-b979-9bd2c258eb42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.650023] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56473382-9ff5-4631-950a-110a99040702 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.751389] env[62525]: DEBUG nova.scheduler.client.report [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1465.771476] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781226, 'name': Rename_Task, 'duration_secs': 0.423032} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.776540] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1465.778624] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a050fc7f-4e4e-44b0-a440-4d7b1bb46ba9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.788160] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.791709] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1465.791709] env[62525]: value = "task-1781229" [ 1465.791709] env[62525]: _type = "Task" [ 1465.791709] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.802011] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781229, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.804383] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 5c9ca73a-bc48-4a75-89c8-03def719e488] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1465.903084] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781224, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.015183] env[62525]: DEBUG nova.compute.manager [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Received event network-changed-62c6a0e2-0091-4863-a677-cbdf737769b7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1466.015183] env[62525]: DEBUG nova.compute.manager [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Refreshing instance network info cache due to event network-changed-62c6a0e2-0091-4863-a677-cbdf737769b7. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1466.015183] env[62525]: DEBUG oslo_concurrency.lockutils [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] Acquiring lock "refresh_cache-5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.087277] env[62525]: DEBUG nova.network.neutron [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Updating instance_info_cache with network_info: [{"id": "62c6a0e2-0091-4863-a677-cbdf737769b7", "address": "fa:16:3e:25:d8:ac", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62c6a0e2-00", "ovs_interfaceid": "62c6a0e2-0091-4863-a677-cbdf737769b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.132119] env[62525]: DEBUG oslo_vmware.api [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Task: {'id': task-1781228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18761} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.132510] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1466.132849] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1466.133089] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1466.133360] env[62525]: INFO nova.compute.manager [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1466.133715] env[62525]: DEBUG oslo.service.loopingcall [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.133992] env[62525]: DEBUG nova.compute.manager [-] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1466.134144] env[62525]: DEBUG nova.network.neutron [-] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1466.258228] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.258401] env[62525]: DEBUG nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1466.262700] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.561s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.269777] env[62525]: INFO nova.compute.claims [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1466.287648] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.307923] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781229, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.308512] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.308751] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances with incomplete migration {{(pid=62525) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1466.403785] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781224, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.475385] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a5fd11-982f-8f07-9fb7-79d26bc5409a/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1466.479759] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8433b9-79c6-45dc-a096-b1fc92110b19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.489430] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a5fd11-982f-8f07-9fb7-79d26bc5409a/disk-0.vmdk is in state: ready. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1466.489600] env[62525]: ERROR oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a5fd11-982f-8f07-9fb7-79d26bc5409a/disk-0.vmdk due to incomplete transfer. [ 1466.489871] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e167668b-3704-494c-a516-5d50743832a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.499756] env[62525]: DEBUG oslo_vmware.rw_handles [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a5fd11-982f-8f07-9fb7-79d26bc5409a/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1466.500039] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Uploaded image b78e633c-4064-4554-9930-0559a5b29327 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1466.503049] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1466.503361] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3967e95d-c160-4238-9e3c-423807b6b8ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.515125] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1466.515125] env[62525]: value = "task-1781230" [ 1466.515125] env[62525]: _type = "Task" [ 1466.515125] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.527831] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781230, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.597023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Releasing lock "refresh_cache-5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.597023] env[62525]: DEBUG nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Instance network_info: |[{"id": "62c6a0e2-0091-4863-a677-cbdf737769b7", "address": "fa:16:3e:25:d8:ac", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62c6a0e2-00", "ovs_interfaceid": "62c6a0e2-0091-4863-a677-cbdf737769b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1466.597023] env[62525]: DEBUG oslo_concurrency.lockutils [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] Acquired lock "refresh_cache-5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.597023] env[62525]: DEBUG nova.network.neutron [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Refreshing network info cache for port 62c6a0e2-0091-4863-a677-cbdf737769b7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1466.597023] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:d8:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62c6a0e2-0091-4863-a677-cbdf737769b7', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1466.606355] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] 
Creating folder: Project (af6b089a5dcf4137bb880d77c010f975). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1466.607356] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f29879c2-2bd1-4faf-9f5d-d04c31bb699d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.626490] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Created folder: Project (af6b089a5dcf4137bb880d77c010f975) in parent group-v369553. [ 1466.626490] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Creating folder: Instances. Parent ref: group-v369689. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1466.626490] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2898d6fe-1201-4303-82f5-6e7586bb13ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.638306] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Created folder: Instances in parent group-v369689. [ 1466.638306] env[62525]: DEBUG oslo.service.loopingcall [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.638306] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1466.638423] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-001292a6-be93-4fcf-a3ff-f2b909e1311f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.663952] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1466.663952] env[62525]: value = "task-1781233" [ 1466.663952] env[62525]: _type = "Task" [ 1466.663952] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.675437] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781233, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.772938] env[62525]: DEBUG nova.compute.utils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1466.780683] env[62525]: DEBUG nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1466.781091] env[62525]: DEBUG nova.network.neutron [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1466.805821] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.815667] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781229, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.816514] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.905851] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781224, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.617392} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.906144] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 61f05e69-5e90-47da-9f47-3651b580a23c/61f05e69-5e90-47da-9f47-3651b580a23c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1466.906553] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1466.906931] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58193dec-2bce-464c-82da-1abd688bd158 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.916657] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1466.916657] env[62525]: value = "task-1781234" [ 1466.916657] env[62525]: _type = "Task" [ 1466.916657] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.932629] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781234, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.941870] env[62525]: DEBUG nova.policy [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd21a8e1379ed4017992ff1f8befa90b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cae5d0f44332499ab2dbd7a69fc0aff2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1466.971865] env[62525]: DEBUG nova.compute.manager [req-e90c24a9-aafb-4403-8347-0c405bc4d78e req-bf579d7c-1e62-449b-9510-d317d8e059e7 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Received event network-vif-deleted-bb0ccd32-fa3c-4e68-98dc-c81a3f541a88 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1466.972500] env[62525]: INFO nova.compute.manager [req-e90c24a9-aafb-4403-8347-0c405bc4d78e req-bf579d7c-1e62-449b-9510-d317d8e059e7 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Neutron deleted interface bb0ccd32-fa3c-4e68-98dc-c81a3f541a88; detaching it from the instance and deleting it from the info cache [ 1466.972650] env[62525]: DEBUG nova.network.neutron [req-e90c24a9-aafb-4403-8347-0c405bc4d78e req-bf579d7c-1e62-449b-9510-d317d8e059e7 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.026557] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781230, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.091994] env[62525]: DEBUG nova.network.neutron [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Successfully updated port: b3e42a52-e289-4575-bfdd-06cf88ba69ce {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1467.184430] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781233, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.208629] env[62525]: DEBUG nova.network.neutron [-] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.290037] env[62525]: DEBUG nova.compute.utils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1467.300396] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.308352] env[62525]: DEBUG oslo_vmware.api [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781229, 'name': PowerOnVM_Task, 'duration_secs': 1.056963} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.309501] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1467.310116] env[62525]: INFO nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Took 9.61 seconds to spawn the instance on the hypervisor. [ 1467.310477] env[62525]: DEBUG nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1467.311788] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802119f0-d638-4c65-8b3a-964216eafa59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.428976] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781234, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144627} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.433152] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1467.434646] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1f3633-b4ed-4b9c-9f85-4b0d1386c7ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.465477] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 61f05e69-5e90-47da-9f47-3651b580a23c/61f05e69-5e90-47da-9f47-3651b580a23c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1467.469428] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e99ff50-1fe5-43bf-8ea5-1ed17e1ec43a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.488027] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92b98bb3-9e62-4f10-9728-86e18e4c61b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.503270] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952d5750-9522-47b8-9c99-acebeebb7bcc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.516082] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1467.516082] env[62525]: value = "task-1781235" [ 1467.516082] env[62525]: _type = "Task" [ 1467.516082] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.541402] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781230, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.554830] env[62525]: DEBUG nova.compute.manager [req-e90c24a9-aafb-4403-8347-0c405bc4d78e req-bf579d7c-1e62-449b-9510-d317d8e059e7 service nova] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Detach interface failed, port_id=bb0ccd32-fa3c-4e68-98dc-c81a3f541a88, reason: Instance deef59c8-f710-434d-bddc-f63bb3d518b1 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1467.592516] env[62525]: DEBUG nova.network.neutron [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Updated VIF entry in instance network info cache for port 62c6a0e2-0091-4863-a677-cbdf737769b7. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1467.593801] env[62525]: DEBUG nova.network.neutron [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Updating instance_info_cache with network_info: [{"id": "62c6a0e2-0091-4863-a677-cbdf737769b7", "address": "fa:16:3e:25:d8:ac", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62c6a0e2-00", "ovs_interfaceid": "62c6a0e2-0091-4863-a677-cbdf737769b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.597970] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "refresh_cache-06716b84-3761-40b0-b76a-0c6ebf0d6aa7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.597970] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired lock "refresh_cache-06716b84-3761-40b0-b76a-0c6ebf0d6aa7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.597970] env[62525]: DEBUG nova.network.neutron [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1467.682997] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781233, 'name': CreateVM_Task, 'duration_secs': 0.542202} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.682997] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1467.682997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.682997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.682997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1467.683698] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35356188-48a1-4ebd-925b-da49fbe244c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.691726] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1467.691726] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523b921d-5266-a7a5-5bdd-fc1c3cb4a9a5" [ 1467.691726] env[62525]: _type = "Task" [ 1467.691726] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.705208] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b921d-5266-a7a5-5bdd-fc1c3cb4a9a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.717441] env[62525]: INFO nova.compute.manager [-] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Took 2.91 seconds to deallocate network for instance. 
[ 1467.720799] env[62525]: DEBUG nova.network.neutron [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Successfully created port: e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1467.797134] env[62525]: DEBUG nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1467.801622] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.848132] env[62525]: INFO nova.compute.manager [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Took 61.76 seconds to build instance. [ 1467.905670] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c4f58a-9c04-450f-9ee0-fe9fcd7a9ac6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.916530] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4beb87de-e22a-4071-8d91-623216d3f368 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.949811] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5c64c3-1a2b-451b-9099-ef74b4d4f0a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.958935] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f585b2b-1bd3-4625-99c0-a167996311d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.974498] env[62525]: DEBUG nova.compute.provider_tree [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1468.044831] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781230, 'name': Destroy_Task, 'duration_secs': 1.112538} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.045208] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.045560] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Destroyed the VM [ 1468.045889] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1468.046322] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-11828988-ba97-49e8-be4b-926e68cef854 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.056081] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1468.056081] env[62525]: value = "task-1781236" [ 1468.056081] env[62525]: _type = "Task" [ 1468.056081] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.068407] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781236, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.098533] env[62525]: DEBUG oslo_concurrency.lockutils [req-1207fece-0f40-4417-968b-86297f6a1f68 req-4575dd0f-ab5c-4c9e-90a7-a05d8e15ea18 service nova] Releasing lock "refresh_cache-5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.134604] env[62525]: DEBUG nova.network.neutron [-] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.206465] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b921d-5266-a7a5-5bdd-fc1c3cb4a9a5, 'name': SearchDatastore_Task, 'duration_secs': 0.024686} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.206465] env[62525]: DEBUG nova.network.neutron [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1468.208294] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.208456] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1468.208660] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.208798] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.208970] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1468.209268] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd5b538d-df8b-4505-9742-4bb5e31a9cb6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.224307] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1468.224518] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1468.225416] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38dd4947-7e42-4097-a387-ac008b332439 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.233563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.233563] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1468.233563] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526984cf-af78-087a-248a-1d3e3ace5d68" [ 1468.233563] env[62525]: _type = "Task" [ 1468.233563] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.242724] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526984cf-af78-087a-248a-1d3e3ace5d68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.293997] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.350969] env[62525]: DEBUG oslo_concurrency.lockutils [None req-59108b41-a141-48ab-a4fb-c54167b2e07d tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.899s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.417348] env[62525]: DEBUG nova.compute.manager [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Received event network-vif-plugged-b3e42a52-e289-4575-bfdd-06cf88ba69ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.417573] env[62525]: DEBUG oslo_concurrency.lockutils [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] Acquiring lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.417781] env[62525]: DEBUG oslo_concurrency.lockutils [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.417937] env[62525]: DEBUG oslo_concurrency.lockutils [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.418680] env[62525]: DEBUG nova.compute.manager [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] No waiting events found dispatching network-vif-plugged-b3e42a52-e289-4575-bfdd-06cf88ba69ce {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1468.418943] env[62525]: WARNING nova.compute.manager [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Received unexpected event network-vif-plugged-b3e42a52-e289-4575-bfdd-06cf88ba69ce for instance with vm_state building and task_state spawning. [ 1468.419047] env[62525]: DEBUG nova.compute.manager [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Received event network-changed-b3e42a52-e289-4575-bfdd-06cf88ba69ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1468.419343] env[62525]: DEBUG nova.compute.manager [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Refreshing instance network info cache due to event network-changed-b3e42a52-e289-4575-bfdd-06cf88ba69ce. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1468.419439] env[62525]: DEBUG oslo_concurrency.lockutils [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] Acquiring lock "refresh_cache-06716b84-3761-40b0-b76a-0c6ebf0d6aa7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.473723] env[62525]: DEBUG nova.network.neutron [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Updating instance_info_cache with network_info: [{"id": "b3e42a52-e289-4575-bfdd-06cf88ba69ce", "address": "fa:16:3e:1b:b8:c5", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3e42a52-e2", "ovs_interfaceid": "b3e42a52-e289-4575-bfdd-06cf88ba69ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.477905] env[62525]: DEBUG nova.scheduler.client.report [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1468.531720] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781235, 'name': ReconfigVM_Task, 'duration_secs': 0.697592} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.533497] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 61f05e69-5e90-47da-9f47-3651b580a23c/61f05e69-5e90-47da-9f47-3651b580a23c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1468.534372] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b48a495-e55a-45cf-b6c8-1577d080f179 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.546240] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1468.546240] env[62525]: value = "task-1781237" [ 1468.546240] env[62525]: _type = "Task" [ 1468.546240] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.559105] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781237, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.569824] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781236, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.636667] env[62525]: INFO nova.compute.manager [-] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Took 2.50 seconds to deallocate network for instance. [ 1468.747254] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526984cf-af78-087a-248a-1d3e3ace5d68, 'name': SearchDatastore_Task, 'duration_secs': 0.015448} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.749023] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-582bd503-007a-421e-835d-1cef8ba7f8b5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.755612] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1468.755612] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5253d80b-361b-9ffd-07d0-a41182cdd862" [ 1468.755612] env[62525]: _type = "Task" [ 1468.755612] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.768820] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5253d80b-361b-9ffd-07d0-a41182cdd862, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.795783] env[62525]: DEBUG oslo_vmware.api [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781217, 'name': ReconfigVM_Task, 'duration_secs': 5.855242} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.796160] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.796418] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Reconfigured VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1468.809081] env[62525]: DEBUG nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1468.841639] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T00:09:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1772973878',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1685009713',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1468.841909] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1468.842076] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1468.842260] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1468.842405] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1468.842548] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1468.842748] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1468.842909] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 
tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1468.843188] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1468.843397] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1468.843579] env[62525]: DEBUG nova.virt.hardware [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1468.846886] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a96a5bc-d7be-43bf-a7f4-e96044447e47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.855854] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2aee664-7c15-4cdf-b4fb-070dad92687f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.976938] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Releasing lock "refresh_cache-06716b84-3761-40b0-b76a-0c6ebf0d6aa7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.977312] env[62525]: DEBUG nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Instance network_info: |[{"id": "b3e42a52-e289-4575-bfdd-06cf88ba69ce", "address": "fa:16:3e:1b:b8:c5", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3e42a52-e2", "ovs_interfaceid": "b3e42a52-e289-4575-bfdd-06cf88ba69ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1468.977623] env[62525]: DEBUG oslo_concurrency.lockutils [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] Acquired lock "refresh_cache-06716b84-3761-40b0-b76a-0c6ebf0d6aa7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.977796] env[62525]: DEBUG nova.network.neutron [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Refreshing network info cache for port b3e42a52-e289-4575-bfdd-06cf88ba69ce {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1468.979059] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:b8:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3e42a52-e289-4575-bfdd-06cf88ba69ce', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1468.992181] env[62525]: DEBUG oslo.service.loopingcall [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.994270] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.731s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.994270] env[62525]: DEBUG nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1468.998557] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1468.999527] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.882s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.999692] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.001888] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.982s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.003336] env[62525]: INFO nova.compute.claims [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1469.006496] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-395c79c9-7a7f-4863-b964-270431669890 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.040889] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1469.040889] env[62525]: value = "task-1781238" [ 1469.040889] env[62525]: _type = "Task" [ 1469.040889] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.055635] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.055879] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.056180] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.056312] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.056506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.058745] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781238, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.065033] env[62525]: INFO nova.compute.manager [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Terminating instance [ 1469.065519] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781237, 'name': Rename_Task, 'duration_secs': 0.172879} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.069448] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1469.070360] env[62525]: DEBUG nova.compute.manager [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1469.070715] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1469.072036] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0253afc-b047-48c2-91e8-a5baae071201 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.074605] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545538e3-381c-4618-88f7-0405691ac431 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.080246] env[62525]: INFO nova.scheduler.client.report [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Deleted allocations for instance 3455a540-7fbc-46ba-b7d6-84a345c0463e [ 1469.085143] env[62525]: DEBUG oslo_vmware.api [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781236, 'name': RemoveSnapshot_Task, 'duration_secs': 0.579542} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.088777] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1469.089122] env[62525]: INFO nova.compute.manager [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Took 17.36 seconds to snapshot the instance on the hypervisor. [ 1469.095292] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1469.095292] env[62525]: value = "task-1781239" [ 1469.095292] env[62525]: _type = "Task" [ 1469.095292] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.095292] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1469.097794] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d44d1a1-2606-4c9d-82b4-e65c1beee3bf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.119438] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781239, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.147945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.219945] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1469.220252] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1469.220407] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleting the datastore file [datastore1] f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1469.220680] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93bd42b9-afa8-4106-8656-a812bb10b4cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.229130] env[62525]: DEBUG oslo_vmware.api [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1469.229130] env[62525]: value = "task-1781241" [ 1469.229130] env[62525]: _type = "Task" [ 1469.229130] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.240077] env[62525]: DEBUG oslo_vmware.api [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.269486] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5253d80b-361b-9ffd-07d0-a41182cdd862, 'name': SearchDatastore_Task, 'duration_secs': 0.010879} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.269761] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.270040] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9/5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1469.270888] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78e727e1-862e-42bc-8285-d989c4a1e9ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.280119] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1469.280119] env[62525]: value = "task-1781242" [ 1469.280119] env[62525]: _type = "Task" [ 1469.280119] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.293869] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781242, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.320961] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.321298] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.404070] env[62525]: DEBUG nova.compute.manager [req-dedc56e1-0cdc-4c7e-be55-a13ee1cd1c93 req-3b70b105-25e9-42f6-9573-07d00a5ae6c4 service nova] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Received event network-vif-deleted-3dc988c5-019e-4c2d-bd0f-5e15f1e00e11 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1469.534489] env[62525]: DEBUG nova.compute.utils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1469.539063] env[62525]: DEBUG nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1469.539292] env[62525]: DEBUG nova.network.neutron [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1469.554736] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781238, 'name': CreateVM_Task, 'duration_secs': 0.431037} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.558561] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1469.560202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.560541] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.561993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1469.562899] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-466e8070-d2c7-447d-b901-16dd6ac7a573 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.570575] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1469.570575] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52bd8a01-0ec7-e329-6e83-f7b87ad03efc" [ 1469.570575] env[62525]: _type = "Task" [ 1469.570575] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.581926] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52bd8a01-0ec7-e329-6e83-f7b87ad03efc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.603723] env[62525]: DEBUG nova.compute.manager [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Instance disappeared during snapshot {{(pid=62525) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1469.610437] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18c9e9e8-fa2e-44bd-bbfa-3a9ac0bea572 tempest-ServersV294TestFqdnHostnames-617965591 tempest-ServersV294TestFqdnHostnames-617965591-project-member] Lock "3455a540-7fbc-46ba-b7d6-84a345c0463e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.596s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.616509] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781239, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.624130] env[62525]: DEBUG nova.policy [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98769d9ddf744118910ce61bcf47f145', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c87f1997d5c4739850790da5dd969fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1469.632204] env[62525]: DEBUG nova.compute.manager [None req-96ec684f-22e7-45ca-a505-a1ae63b120e1 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image not found during clean up b78e633c-4064-4554-9930-0559a5b29327 {{(pid=62525) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4501}} [ 1469.742186] env[62525]: DEBUG oslo_vmware.api [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187589} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.742186] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1469.742186] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1469.742518] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1469.742518] env[62525]: INFO nova.compute.manager [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Took 0.67 seconds to destroy the instance on the hypervisor. [ 1469.742743] env[62525]: DEBUG oslo.service.loopingcall [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1469.742944] env[62525]: DEBUG nova.compute.manager [-] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1469.743053] env[62525]: DEBUG nova.network.neutron [-] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1469.793257] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781242, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.842370] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.842545] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1469.876116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.876433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.876663] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.876824] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.876992] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.881783] env[62525]: INFO nova.compute.manager [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Terminating instance [ 1469.884813] env[62525]: DEBUG nova.compute.manager [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1469.885583] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1469.886463] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40cc0ef-4759-4a2a-9d78-d45207e8c906 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.897788] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1469.897998] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20964680-5642-4339-9d6b-850fb97dddff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.907121] env[62525]: DEBUG oslo_vmware.api [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1469.907121] env[62525]: value = "task-1781243" [ 1469.907121] env[62525]: _type = "Task" [ 1469.907121] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.924714] env[62525]: DEBUG oslo_vmware.api [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781243, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.012965] env[62525]: DEBUG nova.network.neutron [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Successfully updated port: e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1470.023203] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "cafae62e-b001-4ee0-8e89-4da9c60cf488" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.023302] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "cafae62e-b001-4ee0-8e89-4da9c60cf488" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.035265] env[62525]: DEBUG nova.network.neutron [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Updated VIF entry in instance network info cache for port b3e42a52-e289-4575-bfdd-06cf88ba69ce. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.036052] env[62525]: DEBUG nova.network.neutron [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Updating instance_info_cache with network_info: [{"id": "b3e42a52-e289-4575-bfdd-06cf88ba69ce", "address": "fa:16:3e:1b:b8:c5", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3e42a52-e2", "ovs_interfaceid": "b3e42a52-e289-4575-bfdd-06cf88ba69ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.040853] env[62525]: DEBUG nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Start building 
block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1470.089365] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52bd8a01-0ec7-e329-6e83-f7b87ad03efc, 'name': SearchDatastore_Task, 'duration_secs': 0.059927} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.089738] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.089971] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1470.090515] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.090515] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.090920] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1470.091027] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a302d604-eefc-48e7-88fe-16e4b1f8f763 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.105771] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1470.105771] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1470.106451] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8a13ad8-500c-47cf-bd70-037d01a5c60c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.114018] env[62525]: DEBUG oslo_vmware.api [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781239, 'name': PowerOnVM_Task, 'duration_secs': 0.814029} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.114615] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1470.114907] env[62525]: INFO nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Took 9.77 seconds to spawn the instance on the hypervisor. [ 1470.115259] env[62525]: DEBUG nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1470.116233] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc1bb6c-9ff7-4657-bba0-61a68112629b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.123889] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1470.123889] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524e8790-5dd6-bdd3-c75a-6ddf62fe10a8" [ 1470.123889] env[62525]: _type = "Task" [ 1470.123889] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.140852] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524e8790-5dd6-bdd3-c75a-6ddf62fe10a8, 'name': SearchDatastore_Task, 'duration_secs': 0.019626} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.145354] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f37a464-f9ab-4d7e-bab2-fe35d5255f57 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.151673] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1470.151673] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52adb232-04e3-4b93-171f-eed52115815e" [ 1470.151673] env[62525]: _type = "Task" [ 1470.151673] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.170504] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52adb232-04e3-4b93-171f-eed52115815e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.244493] env[62525]: DEBUG nova.network.neutron [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Successfully created port: 214d68cf-ce48-4bf5-b2e5-94a988013295 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1470.297453] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781242, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54457} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.299682] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9/5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1470.299682] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1470.299682] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f340c3f-a9aa-482b-adfc-06d1b5aa4607 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.306566] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1470.306566] env[62525]: value = "task-1781244" [ 1470.306566] env[62525]: _type = "Task" [ 1470.306566] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.317619] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781244, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.372269] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.372433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.372705] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1470.391040] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.391226] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.391434] env[62525]: DEBUG nova.network.neutron [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1470.419019] env[62525]: DEBUG oslo_vmware.api [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781243, 'name': PowerOffVM_Task, 'duration_secs': 0.215207} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.422648] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1470.423125] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1470.423371] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14b1be9c-69cb-47bc-b06a-1d869d0620f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.493381] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35710546-35d3-40f8-8697-2fb3e5b861fb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.502569] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1ab58d-fc4d-4470-93e6-961e4a20d436 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.506671] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1470.506880] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1470.507078] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Deleting the datastore file [datastore1] 7c8474fd-2ca5-4ecc-b2e6-4248baafd639 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1470.507784] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9779a31f-37b1-4d0b-b47b-e34e0bc899d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.537200] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.537262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 
tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.540881] env[62525]: DEBUG nova.network.neutron [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1470.540881] env[62525]: DEBUG nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1470.543258] env[62525]: DEBUG oslo_concurrency.lockutils [req-192e655b-dafb-416b-abaf-248fcc9e0062 req-ffaefc9e-e15a-49b2-994f-8ab5e751c64b service nova] Releasing lock "refresh_cache-06716b84-3761-40b0-b76a-0c6ebf0d6aa7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.544118] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f7b66c-3863-458b-b61f-5bf1897b2d66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.550522] env[62525]: DEBUG oslo_vmware.api [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for the task: (returnval){ [ 1470.550522] env[62525]: value = "task-1781246" [ 1470.550522] env[62525]: _type = "Task" [ 1470.550522] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.559934] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3563774f-0e05-49f0-b50e-8929b5b73008 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.568879] env[62525]: DEBUG oslo_vmware.api [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781246, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.580470] env[62525]: DEBUG nova.compute.provider_tree [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.642449] env[62525]: INFO nova.compute.manager [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Took 58.43 seconds to build instance. 
[ 1470.664910] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52adb232-04e3-4b93-171f-eed52115815e, 'name': SearchDatastore_Task, 'duration_secs': 0.029717} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.665230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.665495] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 06716b84-3761-40b0-b76a-0c6ebf0d6aa7/06716b84-3761-40b0-b76a-0c6ebf0d6aa7.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1470.665751] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5557cde-9fbd-44a2-9348-2fc296f17651 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.673422] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1470.673422] env[62525]: value = "task-1781247" [ 1470.673422] env[62525]: _type = "Task" [ 1470.673422] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.682968] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.715405] env[62525]: DEBUG nova.network.neutron [-] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.819900] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073664} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.820132] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1470.820956] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e2c5c9-0148-4bc9-9849-3e71cbdf6f66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.851954] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9/5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1470.852354] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad0954f5-8eab-4c69-a63d-685d9da97bb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.879498] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1470.879498] env[62525]: value = "task-1781248" [ 1470.879498] env[62525]: _type = "Task" [ 1470.879498] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.891516] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781248, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.925839] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1470.977023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.977023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.069736] env[62525]: DEBUG nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1471.072113] env[62525]: DEBUG oslo_vmware.api [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Task: {'id': task-1781246, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.452703} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.073233] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1471.073314] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1471.073436] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1471.073611] env[62525]: INFO nova.compute.manager [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1471.073942] env[62525]: DEBUG oslo.service.loopingcall [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1471.076831] env[62525]: DEBUG nova.compute.manager [-] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1471.076937] env[62525]: DEBUG nova.network.neutron [-] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1471.081618] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.084152] env[62525]: DEBUG nova.scheduler.client.report [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1471.107446] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1471.107726] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1471.107881] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1471.108165] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 
tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1471.108257] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1471.108396] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1471.108633] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1471.108804] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1471.109069] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1471.109178] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1471.110318] env[62525]: DEBUG nova.virt.hardware [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1471.110610] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a363d1e1-54fe-4920-979d-1e0663039176 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.120888] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f7d728-253a-4f27-8979-dfaa141efabe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.143590] env[62525]: DEBUG nova.network.neutron [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1471.145841] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f37c2ba-66b3-438b-a29c-7d3199176aa7 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "61f05e69-5e90-47da-9f47-3651b580a23c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.750s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.185527] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781247, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.219778] env[62525]: INFO nova.compute.manager [-] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Took 1.48 seconds to deallocate network for instance. [ 1471.391195] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781248, 'name': ReconfigVM_Task, 'duration_secs': 0.336884} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.391554] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9/5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1471.392160] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-201ff399-7150-4fe2-a6fc-e679b4dcea34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.399988] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1471.399988] env[62525]: value = "task-1781249" [ 1471.399988] env[62525]: _type = "Task" [ 1471.399988] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.412246] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781249, 'name': Rename_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.556678] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.556998] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.557495] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.557495] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.557661] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.561220] env[62525]: INFO nova.compute.manager [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Terminating instance [ 1471.563481] env[62525]: DEBUG nova.compute.manager [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1471.563718] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1471.564593] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a99d1fb-0436-49c7-9052-03c980618a9d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.575636] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1471.575956] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5363140-260d-478d-85fc-84e75318c7f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.583274] env[62525]: DEBUG oslo_vmware.api [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1471.583274] env[62525]: value = "task-1781250" [ 1471.583274] env[62525]: _type = "Task" [ 1471.583274] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.588920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.589480] env[62525]: DEBUG nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1471.596210] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.529s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.597892] env[62525]: INFO nova.compute.claims [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.601486] env[62525]: DEBUG oslo_vmware.api [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.626229] env[62525]: DEBUG nova.network.neutron [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Updating instance_info_cache with network_info: [{"id": "e95ca310-933c-4095-a25b-170fc26750e5", "address": "fa:16:3e:0b:d7:5e", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape95ca310-93", "ovs_interfaceid": "e95ca310-933c-4095-a25b-170fc26750e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.632854] env[62525]: INFO nova.network.neutron [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Port a22c0ea8-32d8-47ab-bede-4917d1b3db27 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1471.633247] env[62525]: DEBUG nova.network.neutron [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [{"id": "9c337d27-bc69-4787-a533-f523faa8aa10", "address": "fa:16:3e:36:65:6d", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c337d27-bc", "ovs_interfaceid": "9c337d27-bc69-4787-a533-f523faa8aa10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.649679] env[62525]: DEBUG nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1471.667237] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.687950] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781247, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.760196} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.688277] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 06716b84-3761-40b0-b76a-0c6ebf0d6aa7/06716b84-3761-40b0-b76a-0c6ebf0d6aa7.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1471.688567] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1471.689159] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-424c90e5-bd28-46b0-8e1e-1473ba46618c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.698268] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1471.698268] env[62525]: value = "task-1781251" [ 1471.698268] env[62525]: _type = "Task" [ 1471.698268] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.712195] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781251, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.726779] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.908689] env[62525]: DEBUG nova.compute.manager [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received event network-vif-deleted-a22c0ea8-32d8-47ab-bede-4917d1b3db27 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.908964] env[62525]: DEBUG nova.compute.manager [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Received event network-vif-plugged-e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.909116] env[62525]: DEBUG oslo_concurrency.lockutils [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] Acquiring lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.909361] env[62525]: DEBUG oslo_concurrency.lockutils [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.909581] env[62525]: DEBUG oslo_concurrency.lockutils [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.909752] env[62525]: DEBUG nova.compute.manager [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] No waiting events found dispatching network-vif-plugged-e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1471.910073] env[62525]: WARNING nova.compute.manager [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Received unexpected event network-vif-plugged-e95ca310-933c-4095-a25b-170fc26750e5 for instance with vm_state building and task_state spawning. 
[ 1471.910109] env[62525]: DEBUG nova.compute.manager [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Received event network-changed-e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.910284] env[62525]: DEBUG nova.compute.manager [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Refreshing instance network info cache due to event network-changed-e95ca310-933c-4095-a25b-170fc26750e5. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1471.910452] env[62525]: DEBUG oslo_concurrency.lockutils [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] Acquiring lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.914844] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781249, 'name': Rename_Task, 'duration_secs': 0.160564} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.915113] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1471.915373] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-777346b7-c87d-4970-ad77-157e80c1d940 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.929536] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1471.929536] env[62525]: value = "task-1781252" [ 1471.929536] env[62525]: _type = "Task" [ 1471.929536] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.940221] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781252, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.042605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] Acquiring lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.042834] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] Acquired lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.042945] env[62525]: DEBUG nova.network.neutron [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1472.094472] env[62525]: DEBUG oslo_vmware.api [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781250, 'name': PowerOffVM_Task, 'duration_secs': 0.24701} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.094771] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1472.094990] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1472.095210] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fef4454-f044-4566-aa1f-1cd5bc866619 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.105291] env[62525]: DEBUG nova.compute.utils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1472.108044] env[62525]: DEBUG nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1472.108044] env[62525]: DEBUG nova.network.neutron [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1472.130064] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Releasing lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.130064] env[62525]: DEBUG nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Instance network_info: |[{"id": "e95ca310-933c-4095-a25b-170fc26750e5", "address": "fa:16:3e:0b:d7:5e", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape95ca310-93", "ovs_interfaceid": "e95ca310-933c-4095-a25b-170fc26750e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1472.130608] env[62525]: DEBUG oslo_concurrency.lockutils [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] Acquired lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.130718] env[62525]: DEBUG nova.network.neutron [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Refreshing network info cache for port e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1472.131884] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:d7:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e95ca310-933c-4095-a25b-170fc26750e5', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1472.139657] env[62525]: DEBUG oslo.service.loopingcall [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1472.143151] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.145143] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1472.147097] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6706d92-5caa-4743-96a4-17ab5d3b8de8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.164174] env[62525]: DEBUG nova.policy [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eee5a510a514320b8f5eb0a6bf66121', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba1be24793ee4d83babc07ff8ad5abad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1472.173085] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-c7603ce8-8471-4813-9faf-3667a205893c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.173085] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1472.173480] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1472.173682] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Deleting contents of the VM from datastore datastore1 
{{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1472.173861] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleting the datastore file [datastore1] 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1472.174333] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.174789] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1aae86a-72ad-448b-8aa8-0f350cca6356 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.178218] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.178659] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1472.178659] env[62525]: value = "task-1781254" [ 1472.178659] env[62525]: _type = "Task" [ 1472.178659] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.178848] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.179461] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.182976] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.183243] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.183425] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1472.185189] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.186297] env[62525]: DEBUG oslo_vmware.api [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1472.186297] env[62525]: value = "task-1781255" [ 1472.186297] env[62525]: _type = "Task" [ 1472.186297] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.190079] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781254, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.195111] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.199711] env[62525]: DEBUG oslo_vmware.api [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.209049] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077951} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.209359] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.210169] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3114b42a-9f72-417a-825b-75fae8235d3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.244508] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 06716b84-3761-40b0-b76a-0c6ebf0d6aa7/06716b84-3761-40b0-b76a-0c6ebf0d6aa7.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.244848] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-690943dc-ce53-4181-92df-6123cc699cab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.270026] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1472.270026] env[62525]: value = "task-1781256" [ 1472.270026] env[62525]: _type = "Task" [ 1472.270026] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.279248] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781256, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.292801] env[62525]: DEBUG nova.network.neutron [-] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.441212] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781252, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.543585] env[62525]: DEBUG nova.network.neutron [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Updated VIF entry in instance network info cache for port e95ca310-933c-4095-a25b-170fc26750e5. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1472.543585] env[62525]: DEBUG nova.network.neutron [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Updating instance_info_cache with network_info: [{"id": "e95ca310-933c-4095-a25b-170fc26750e5", "address": "fa:16:3e:0b:d7:5e", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape95ca310-93", "ovs_interfaceid": "e95ca310-933c-4095-a25b-170fc26750e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.610313] env[62525]: DEBUG nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1472.649496] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d59d5633-f9c6-48ae-b59e-6e33320ba20a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-8adc8b4b-1087-4a11-9ee8-d897f1aa83f3-a22c0ea8-32d8-47ab-bede-4917d1b3db27" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.479s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.675893] env[62525]: DEBUG nova.network.neutron [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Successfully created port: 626c2e84-d2dc-4c5b-81b4-0a937a7591c9 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1472.690782] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.703901] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781254, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.715618] env[62525]: DEBUG oslo_vmware.api [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.379827} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.715884] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1472.716087] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1472.716265] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1472.716451] env[62525]: INFO nova.compute.manager [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1472.716660] env[62525]: DEBUG oslo.service.loopingcall [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1472.717246] env[62525]: DEBUG nova.compute.manager [-] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1472.717338] env[62525]: DEBUG nova.network.neutron [-] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1472.786844] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781256, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.798705] env[62525]: INFO nova.compute.manager [-] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Took 1.72 seconds to deallocate network for instance. 
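Note: the interleaved "Waiting for the task"/"_poll_task" entries in this stretch show oslo.vmware polling each vCenter task (CreateVM_Task, DeleteDatastoreFile_Task, ReconfigVM_Task) until it reports success, with oslo.service's loopingcall driving the retry loop. Below is a minimal sketch of that polling pattern, not the actual oslo.vmware implementation; get_task_info is a hypothetical stand-in for reading the Task managed object's info property.

from oslo_service import loopingcall


def wait_for_vcenter_task(task_ref, get_task_info, interval=0.5):
    """Poll a vSphere Task reference until it finishes (illustrative only)."""

    def _poll():
        info = get_task_info(task_ref)  # stand-in: returns .state, .progress, .error
        if info.state == 'success':
            # LoopingCallDone stops the loop; its value becomes wait()'s result
            raise loopingcall.LoopingCallDone(info)
        if info.state == 'error':
            raise RuntimeError('task %s failed: %s' % (task_ref, info.error))
        # still queued/running: report progress and poll again on the next tick
        print('Task %s progress is %s%%' % (task_ref, info.progress or 0))

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()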
[ 1472.814687] env[62525]: DEBUG nova.compute.manager [req-9a3367fd-ab48-47ba-b822-4193d9318cd9 req-2bb8e485-fb90-4c87-9c84-27a237d983dd service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Received event network-vif-plugged-214d68cf-ce48-4bf5-b2e5-94a988013295 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1472.814687] env[62525]: DEBUG oslo_concurrency.lockutils [req-9a3367fd-ab48-47ba-b822-4193d9318cd9 req-2bb8e485-fb90-4c87-9c84-27a237d983dd service nova] Acquiring lock "d8c7d102-46e6-40fe-a864-a72590af4982-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.815586] env[62525]: DEBUG oslo_concurrency.lockutils [req-9a3367fd-ab48-47ba-b822-4193d9318cd9 req-2bb8e485-fb90-4c87-9c84-27a237d983dd service nova] Lock "d8c7d102-46e6-40fe-a864-a72590af4982-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.816029] env[62525]: DEBUG oslo_concurrency.lockutils [req-9a3367fd-ab48-47ba-b822-4193d9318cd9 req-2bb8e485-fb90-4c87-9c84-27a237d983dd service nova] Lock "d8c7d102-46e6-40fe-a864-a72590af4982-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.816640] env[62525]: DEBUG nova.compute.manager [req-9a3367fd-ab48-47ba-b822-4193d9318cd9 req-2bb8e485-fb90-4c87-9c84-27a237d983dd service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] No waiting events found dispatching network-vif-plugged-214d68cf-ce48-4bf5-b2e5-94a988013295 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1472.818884] env[62525]: WARNING nova.compute.manager [req-9a3367fd-ab48-47ba-b822-4193d9318cd9 req-2bb8e485-fb90-4c87-9c84-27a237d983dd service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Received unexpected event network-vif-plugged-214d68cf-ce48-4bf5-b2e5-94a988013295 for instance with vm_state building and task_state spawning. [ 1472.943837] env[62525]: DEBUG oslo_vmware.api [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781252, 'name': PowerOnVM_Task, 'duration_secs': 0.639844} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.945091] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1472.945091] env[62525]: INFO nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Took 9.97 seconds to spawn the instance on the hypervisor. 
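Note: the lock bookkeeping above ("Acquiring lock", 'acquired ... waited 0.000s', '"released" ... held 0.001s') is oslo.concurrency's lockutils logging around named critical sections, here a per-instance "<uuid>-events" lock taken while popping a network-vif-plugged event. A small sketch of that pattern under the assumption of a caller-supplied in-memory event registry; this is not Nova's actual InstanceEvents code.

from oslo_concurrency import lockutils


def pop_instance_event(waiting_events, instance_uuid, event_name):
    # Named per-instance lock, e.g. "d8c7d102-...-events". The synchronized
    # decorator's wrapper is what emits the Acquiring / acquired / "released"
    # DEBUG lines seen above, tagged with the decorated function's name and
    # the wait/held durations.
    @lockutils.synchronized('%s-events' % instance_uuid)
    def _pop_event():
        return waiting_events.get(instance_uuid, {}).pop(event_name, None)

    return _pop_event()

The "compute_resources" entries in the same stretch come from the same decorator pattern, just applied with one shared service-wide lock name instead of a per-instance one.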
[ 1472.945091] env[62525]: DEBUG nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1472.945722] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad59a9b7-0556-41fb-a1b6-446b7b07ce1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.041354] env[62525]: DEBUG nova.network.neutron [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Successfully updated port: 214d68cf-ce48-4bf5-b2e5-94a988013295 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1473.042899] env[62525]: DEBUG nova.network.neutron [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Updating instance_info_cache with network_info: [{"id": "b3bb2d5b-835d-4462-8234-ea61148680b4", "address": "fa:16:3e:84:79:d0", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3bb2d5b-83", "ovs_interfaceid": "b3bb2d5b-835d-4462-8234-ea61148680b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.045595] env[62525]: DEBUG oslo_concurrency.lockutils [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] Releasing lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.045595] env[62525]: DEBUG nova.compute.manager [req-ab65efea-7749-4bf7-b0f6-7b87865028d2 req-65d10a3d-0531-496c-9769-c0cacf50708f service nova] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Received event network-vif-deleted-c89b9d12-f94d-4161-bae7-150d736f9e86 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1473.136750] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954345c3-08c0-442b-8c43-776c52d943ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.149486] 
env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741dee3d-337b-4c9c-9174-f381042fb840 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.196333] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dbac22-4e31-4e03-8f19-94063d8b0b1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.210690] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781254, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.212433] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecd4730-8b61-4a6c-9d44-d023d1bfe35f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.229738] env[62525]: DEBUG nova.compute.provider_tree [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.282537] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781256, 'name': ReconfigVM_Task, 'duration_secs': 0.607731} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.282830] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 06716b84-3761-40b0-b76a-0c6ebf0d6aa7/06716b84-3761-40b0-b76a-0c6ebf0d6aa7.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1473.283472] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97f9eda8-390c-45d5-ba49-9da20b57c557 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.291448] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1473.291448] env[62525]: value = "task-1781257" [ 1473.291448] env[62525]: _type = "Task" [ 1473.291448] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.301265] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781257, 'name': Rename_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.306895] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.473019] env[62525]: INFO nova.compute.manager [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Took 59.75 seconds to build instance. [ 1473.547520] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] Releasing lock "refresh_cache-61f05e69-5e90-47da-9f47-3651b580a23c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.547577] env[62525]: DEBUG nova.compute.manager [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Inject network info {{(pid=62525) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1473.547853] env[62525]: DEBUG nova.compute.manager [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] network_info to inject: |[{"id": "b3bb2d5b-835d-4462-8234-ea61148680b4", "address": "fa:16:3e:84:79:d0", "network": {"id": "c9a9ed37-2514-464a-a4be-9ba444a72ca3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1216142432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4685480cae574a5daac6a1f077a8c319", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3bb2d5b-83", "ovs_interfaceid": "b3bb2d5b-835d-4462-8234-ea61148680b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1473.554853] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Reconfiguring VM instance to set the machine id {{(pid=62525) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1473.555505] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 
tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-d8c7d102-46e6-40fe-a864-a72590af4982" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.555658] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-d8c7d102-46e6-40fe-a864-a72590af4982" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.555829] env[62525]: DEBUG nova.network.neutron [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1473.557022] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-996be9b4-304f-42ac-8a08-b8f6f25836d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.585019] env[62525]: DEBUG oslo_vmware.api [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] Waiting for the task: (returnval){ [ 1473.585019] env[62525]: value = "task-1781258" [ 1473.585019] env[62525]: _type = "Task" [ 1473.585019] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.593194] env[62525]: DEBUG oslo_vmware.api [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] Task: {'id': task-1781258, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.622162] env[62525]: DEBUG nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1473.653804] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1473.653804] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1473.653804] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1473.653804] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1473.653804] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1473.654771] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1473.655264] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1473.655554] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1473.655852] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1473.656143] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1473.656438] env[62525]: DEBUG nova.virt.hardware [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1473.657497] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b9ba52-8ac9-41d1-9416-f1ec34962833 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.668448] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e201582-d9bb-4345-98b5-7ac487d26ffb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.702047] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781254, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.732341] env[62525]: DEBUG nova.scheduler.client.report [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.736597] env[62525]: DEBUG nova.network.neutron [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1473.802982] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781257, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.975300] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3cdada9a-0c8f-4949-8a79-fdadd5e2c0a1 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.859s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.097310] env[62525]: DEBUG oslo_vmware.api [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] Task: {'id': task-1781258, 'name': ReconfigVM_Task, 'duration_secs': 0.259446} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.097479] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b76e0c-b545-4f76-b7cc-7accf977d7ce tempest-ServersAdminTestJSON-1842023207 tempest-ServersAdminTestJSON-1842023207-project-admin] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Reconfigured VM instance to set the machine id {{(pid=62525) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1474.144982] env[62525]: DEBUG nova.network.neutron [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Updating instance_info_cache with network_info: [{"id": "214d68cf-ce48-4bf5-b2e5-94a988013295", "address": "fa:16:3e:f3:80:32", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214d68cf-ce", "ovs_interfaceid": "214d68cf-ce48-4bf5-b2e5-94a988013295", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.203968] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781254, 'name': CreateVM_Task, 'duration_secs': 1.79008} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.204153] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1474.204984] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.205102] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.206184] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1474.206810] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ebc38f7-cd11-4463-aee2-35265be68d19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.215796] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1474.215796] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5271f4b0-0716-fd5b-ac8d-add548dea3a4" [ 1474.215796] env[62525]: _type = "Task" [ 1474.215796] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.234194] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5271f4b0-0716-fd5b-ac8d-add548dea3a4, 'name': SearchDatastore_Task, 'duration_secs': 0.011165} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.234194] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.234194] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1474.234194] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.234194] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.234194] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1474.234194] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91b2b217-455a-43c7-85d4-376f5e7450f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.240042] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.240281] env[62525]: DEBUG nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1474.244107] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.117s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.244262] env[62525]: DEBUG nova.objects.instance [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1474.247646] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1474.247750] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1474.252984] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac2e86f3-c8d2-4efa-ae6a-9f8e88e99343 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.261268] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1474.261268] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52006f84-1d7d-894e-4fd8-0a3d41da11ee" [ 1474.261268] env[62525]: _type = "Task" [ 1474.261268] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.269803] env[62525]: DEBUG nova.compute.manager [req-93831011-4be6-4748-a8bb-bfc6be010e17 req-f6dad7a9-385c-42a0-86cb-a0fab6af5749 service nova] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Received event network-vif-deleted-c8e1d38a-ff53-4f83-bb82-ef15ba3c18b3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1474.270144] env[62525]: DEBUG nova.compute.manager [req-93831011-4be6-4748-a8bb-bfc6be010e17 req-f6dad7a9-385c-42a0-86cb-a0fab6af5749 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Received event network-vif-deleted-9c337d27-bc69-4787-a533-f523faa8aa10 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1474.270429] env[62525]: INFO nova.compute.manager [req-93831011-4be6-4748-a8bb-bfc6be010e17 req-f6dad7a9-385c-42a0-86cb-a0fab6af5749 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Neutron deleted interface 9c337d27-bc69-4787-a533-f523faa8aa10; detaching it from the instance and deleting it from the info cache [ 1474.270667] env[62525]: DEBUG nova.network.neutron [req-93831011-4be6-4748-a8bb-bfc6be010e17 req-f6dad7a9-385c-42a0-86cb-a0fab6af5749 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.278877] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52006f84-1d7d-894e-4fd8-0a3d41da11ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.303278] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781257, 'name': Rename_Task, 'duration_secs': 0.777756} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.303528] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1474.303767] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffad61b0-99f0-454b-a303-0fdffb047a03 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.305529] env[62525]: DEBUG nova.network.neutron [-] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.313462] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1474.313462] env[62525]: value = "task-1781259" [ 1474.313462] env[62525]: _type = "Task" [ 1474.313462] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.322888] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.648563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-d8c7d102-46e6-40fe-a864-a72590af4982" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.648904] env[62525]: DEBUG nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Instance network_info: |[{"id": "214d68cf-ce48-4bf5-b2e5-94a988013295", "address": "fa:16:3e:f3:80:32", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214d68cf-ce", "ovs_interfaceid": "214d68cf-ce48-4bf5-b2e5-94a988013295", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1474.649692] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:80:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '214d68cf-ce48-4bf5-b2e5-94a988013295', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1474.659069] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating folder: Project (6c87f1997d5c4739850790da5dd969fe). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1474.659473] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-668052d5-5ce6-43d9-9c60-9c7ad5181e1f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.673792] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created folder: Project (6c87f1997d5c4739850790da5dd969fe) in parent group-v369553. [ 1474.674085] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating folder: Instances. Parent ref: group-v369694. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1474.674235] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5cfe4d3b-bf08-4df9-98eb-904f698d64c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.687902] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created folder: Instances in parent group-v369694. [ 1474.688223] env[62525]: DEBUG oslo.service.loopingcall [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1474.688441] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1474.688650] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-479baa7b-62c9-4539-b1b9-2772020279ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.715331] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1474.715331] env[62525]: value = "task-1781262" [ 1474.715331] env[62525]: _type = "Task" [ 1474.715331] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.743355] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781262, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.758986] env[62525]: DEBUG nova.compute.utils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1474.761863] env[62525]: DEBUG nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1474.763659] env[62525]: DEBUG nova.network.neutron [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1474.775081] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77915ac1-51a9-4d43-ad00-c4f3dc6899da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.788995] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52006f84-1d7d-894e-4fd8-0a3d41da11ee, 'name': SearchDatastore_Task, 'duration_secs': 0.031177} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.792896] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d56d7522-e16c-4852-908b-eac83216eb0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.799130] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3e2e22-1f12-43af-8ef5-8b3b9b33811b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.814585] env[62525]: DEBUG nova.network.neutron [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Successfully updated port: 626c2e84-d2dc-4c5b-81b4-0a937a7591c9 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1474.820485] env[62525]: INFO nova.compute.manager [-] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Took 2.10 seconds to deallocate network for instance. [ 1474.820485] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1474.820485] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d06d85-76bf-ec89-01fb-295fe7339a4d" [ 1474.820485] env[62525]: _type = "Task" [ 1474.820485] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.854453] env[62525]: DEBUG nova.compute.manager [req-93831011-4be6-4748-a8bb-bfc6be010e17 req-f6dad7a9-385c-42a0-86cb-a0fab6af5749 service nova] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Detach interface failed, port_id=9c337d27-bc69-4787-a533-f523faa8aa10, reason: Instance 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1474.860076] env[62525]: DEBUG oslo_vmware.api [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781259, 'name': PowerOnVM_Task, 'duration_secs': 0.480674} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.866221] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1474.866572] env[62525]: INFO nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Took 9.27 seconds to spawn the instance on the hypervisor. 
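The wait_for_task / _poll_task entries above and below (oslo_vmware/api.py:397, 434, 444) show the driver polling vSphere tasks such as CreateVM_Task, CopyVirtualDisk_Task and PowerOnVM_Task until they report completion. The following is a minimal, illustrative sketch of that polling pattern only; TaskInfo and get_task_info are hypothetical stand-ins for this sketch, not the real oslo_vmware internals.

    import time

    # Hypothetical stand-in for the task state that the real driver reads back
    # from vCenter (in the log this happens via oslo_vmware service calls).
    class TaskInfo:
        def __init__(self, state="running", progress=0, error=None):
            self.state = state        # "running", "success" or "error"
            self.progress = progress  # 0-100, as in the "progress is N%" lines
            self.error = error

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vSphere-style task until it succeeds, fails or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            # Mirrors the periodic "Task: {...} progress is N%" DEBUG entries.
            print("task progress is %d%%" % info.progress)
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError("task failed: %s" % info.error)
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)

    # Example: a fake task that reports 0%, 50%, then success.
    _states = iter([TaskInfo(progress=0), TaskInfo(progress=50),
                    TaskInfo(state="success", progress=100)])
    wait_for_task(lambda: next(_states), poll_interval=0.0)
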
[ 1474.867259] env[62525]: DEBUG nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1474.867413] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d06d85-76bf-ec89-01fb-295fe7339a4d, 'name': SearchDatastore_Task, 'duration_secs': 0.021725} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.868763] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1caaaf-ed9d-474b-b85a-8c175ea58543 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.873430] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.874187] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7a92bac8-9cee-41ed-81e3-08b48432fe7c/7a92bac8-9cee-41ed-81e3-08b48432fe7c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1474.875683] env[62525]: DEBUG nova.policy [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36a7f35bf96d42b4a42e1cf71a15accb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3263280a4a14e87ac174d07c5dcb443', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1474.877877] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2221d61f-044d-4561-9b68-5a00ae650a18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.891784] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1474.891784] env[62525]: value = "task-1781263" [ 1474.891784] env[62525]: _type = "Task" [ 1474.891784] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.903149] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781263, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.090052] env[62525]: DEBUG nova.compute.manager [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Received event network-changed-214d68cf-ce48-4bf5-b2e5-94a988013295 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1475.091960] env[62525]: DEBUG nova.compute.manager [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Refreshing instance network info cache due to event network-changed-214d68cf-ce48-4bf5-b2e5-94a988013295. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1475.091960] env[62525]: DEBUG oslo_concurrency.lockutils [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] Acquiring lock "refresh_cache-d8c7d102-46e6-40fe-a864-a72590af4982" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.091960] env[62525]: DEBUG oslo_concurrency.lockutils [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] Acquired lock "refresh_cache-d8c7d102-46e6-40fe-a864-a72590af4982" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.091960] env[62525]: DEBUG nova.network.neutron [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Refreshing network info cache for port 214d68cf-ce48-4bf5-b2e5-94a988013295 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1475.226979] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781262, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.263021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-704e3150-3e05-4fe8-bd82-d7e6376cc7d8 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.264319] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.070s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.264593] env[62525]: DEBUG nova.objects.instance [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lazy-loading 'resources' on Instance uuid 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1475.273062] env[62525]: DEBUG nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1475.320814] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "refresh_cache-70313696-a9cc-499c-b9e6-329a71c4b915" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.320992] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "refresh_cache-70313696-a9cc-499c-b9e6-329a71c4b915" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.321133] env[62525]: DEBUG nova.network.neutron [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1475.355517] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.410540] env[62525]: INFO nova.compute.manager [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Took 47.58 seconds to build 
instance. [ 1475.419553] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781263, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.491124] env[62525]: DEBUG nova.network.neutron [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Successfully created port: 1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1475.731204] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781262, 'name': CreateVM_Task, 'duration_secs': 0.588497} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.734596] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1475.735552] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.735887] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.737036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1475.737036] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b066ba7-8caf-4375-b55d-e816a3eaf951 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.743562] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1475.743562] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5230d485-2072-4d7a-cc09-b7775a29080d" [ 1475.743562] env[62525]: _type = "Task" [ 1475.743562] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.755438] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5230d485-2072-4d7a-cc09-b7775a29080d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.895812] env[62525]: DEBUG nova.network.neutron [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1475.916498] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17f0ad55-2e9e-471b-be37-53a22b7ebe83 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.528s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.923107] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781263, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.043776] env[62525]: DEBUG nova.network.neutron [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Updated VIF entry in instance network info cache for port 214d68cf-ce48-4bf5-b2e5-94a988013295. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1476.044159] env[62525]: DEBUG nova.network.neutron [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Updating instance_info_cache with network_info: [{"id": "214d68cf-ce48-4bf5-b2e5-94a988013295", "address": "fa:16:3e:f3:80:32", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214d68cf-ce", "ovs_interfaceid": "214d68cf-ce48-4bf5-b2e5-94a988013295", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.126291] env[62525]: DEBUG nova.network.neutron [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Updating instance_info_cache with network_info: [{"id": "626c2e84-d2dc-4c5b-81b4-0a937a7591c9", "address": "fa:16:3e:07:6d:9b", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap626c2e84-d2", "ovs_interfaceid": "626c2e84-d2dc-4c5b-81b4-0a937a7591c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.258779] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5230d485-2072-4d7a-cc09-b7775a29080d, 'name': SearchDatastore_Task, 'duration_secs': 0.1379} 
completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.259594] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.259594] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1476.259594] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.259594] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.259785] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1476.259984] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b87c7b7a-1eeb-40ab-8ad0-21696fa05646 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.274323] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1476.274323] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1476.280525] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c14186b-6441-4f4c-b982-6adfcf6a7cee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.287381] env[62525]: DEBUG nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1476.298150] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1476.298150] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5295b3a6-23f0-7daf-3651-273572fac287" [ 1476.298150] env[62525]: _type = "Task" [ 1476.298150] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.303625] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6829ca70-5d2a-4718-8713-45bd556ff284 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.316948] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16985122-6646-401e-b96b-224ba66a1c04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.322826] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5295b3a6-23f0-7daf-3651-273572fac287, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.357180] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1476.357395] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1476.358258] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1476.358258] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1476.358258] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1476.358258] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1476.358258] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1476.358509] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1476.358509] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1476.358641] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1476.358807] env[62525]: DEBUG nova.virt.hardware [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1476.359683] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4fc219-2047-4136-a212-4150a83ac24c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.363441] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8815ed-edda-4ad0-a2bd-08ab6018609c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.378437] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75bc2647-98ab-4d25-b3f8-1ecfa205f8eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.385662] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67cddf0-b0a8-41c2-967d-bcbfb9290c6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.410869] env[62525]: DEBUG nova.compute.provider_tree [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1476.424389] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781263, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.553020] env[62525]: DEBUG oslo_concurrency.lockutils [req-0bf508ee-2abd-46a8-8ffc-8d236dda2c02 req-9dd593c0-8061-4538-8751-18d81c574d4d service nova] Releasing lock "refresh_cache-d8c7d102-46e6-40fe-a864-a72590af4982" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.614383] env[62525]: DEBUG nova.compute.manager [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Received event network-vif-plugged-626c2e84-d2dc-4c5b-81b4-0a937a7591c9 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.614682] env[62525]: DEBUG oslo_concurrency.lockutils [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] Acquiring lock "70313696-a9cc-499c-b9e6-329a71c4b915-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.614737] env[62525]: DEBUG oslo_concurrency.lockutils [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] Lock "70313696-a9cc-499c-b9e6-329a71c4b915-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.614911] env[62525]: DEBUG oslo_concurrency.lockutils [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] Lock "70313696-a9cc-499c-b9e6-329a71c4b915-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.615055] env[62525]: DEBUG nova.compute.manager [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] No waiting events found dispatching network-vif-plugged-626c2e84-d2dc-4c5b-81b4-0a937a7591c9 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1476.615214] env[62525]: WARNING nova.compute.manager [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Received unexpected event network-vif-plugged-626c2e84-d2dc-4c5b-81b4-0a937a7591c9 for instance with vm_state building and task_state spawning. [ 1476.615398] env[62525]: DEBUG nova.compute.manager [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Received event network-changed-626c2e84-d2dc-4c5b-81b4-0a937a7591c9 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.615629] env[62525]: DEBUG nova.compute.manager [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Refreshing instance network info cache due to event network-changed-626c2e84-d2dc-4c5b-81b4-0a937a7591c9. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1476.615693] env[62525]: DEBUG oslo_concurrency.lockutils [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] Acquiring lock "refresh_cache-70313696-a9cc-499c-b9e6-329a71c4b915" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.635015] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "refresh_cache-70313696-a9cc-499c-b9e6-329a71c4b915" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.635396] env[62525]: DEBUG nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Instance network_info: |[{"id": "626c2e84-d2dc-4c5b-81b4-0a937a7591c9", "address": "fa:16:3e:07:6d:9b", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap626c2e84-d2", "ovs_interfaceid": "626c2e84-d2dc-4c5b-81b4-0a937a7591c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1476.636021] env[62525]: DEBUG oslo_concurrency.lockutils [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] Acquired lock "refresh_cache-70313696-a9cc-499c-b9e6-329a71c4b915" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.636273] env[62525]: DEBUG nova.network.neutron [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Refreshing network info cache for port 626c2e84-d2dc-4c5b-81b4-0a937a7591c9 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1476.637579] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:6d:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd72ef32-a57c-43b0-93df-e8a030987d44', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '626c2e84-d2dc-4c5b-81b4-0a937a7591c9', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.648204] env[62525]: DEBUG oslo.service.loopingcall [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.648204] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.648204] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a1610e7-21f2-47ac-b00b-92e48905e087 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.672556] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.672556] env[62525]: value = "task-1781264" [ 1476.672556] env[62525]: _type = "Task" [ 1476.672556] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.683175] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781264, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.811395] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5295b3a6-23f0-7daf-3651-273572fac287, 'name': SearchDatastore_Task, 'duration_secs': 0.024611} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.812300] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0a0e4b6-5750-42f2-9bc8-1d1135512bd6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.818865] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1476.818865] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52badecc-4b9a-c46d-e0b3-95427faebe59" [ 1476.818865] env[62525]: _type = "Task" [ 1476.818865] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.829038] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52badecc-4b9a-c46d-e0b3-95427faebe59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.916268] env[62525]: DEBUG nova.scheduler.client.report [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1476.928387] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781263, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.867404} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.928668] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7a92bac8-9cee-41ed-81e3-08b48432fe7c/7a92bac8-9cee-41ed-81e3-08b48432fe7c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1476.928887] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1476.929236] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04fb9bbe-ca61-41ef-b095-57f17500634d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.938696] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1476.938696] env[62525]: value = "task-1781265" [ 1476.938696] env[62525]: _type = "Task" [ 1476.938696] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.949711] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781265, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.187712] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781264, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.266613] env[62525]: INFO nova.compute.manager [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Rebuilding instance [ 1477.326016] env[62525]: DEBUG nova.compute.manager [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1477.326016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9489ccb0-12f5-4a66-8253-4973553eb962 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.340403] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52badecc-4b9a-c46d-e0b3-95427faebe59, 'name': SearchDatastore_Task, 'duration_secs': 0.016212} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.342865] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.343148] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] d8c7d102-46e6-40fe-a864-a72590af4982/d8c7d102-46e6-40fe-a864-a72590af4982.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1477.343409] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c4abede-601a-481b-89f3-870c5d963157 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.357088] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1477.357088] env[62525]: value = "task-1781267" [ 1477.357088] env[62525]: _type = "Task" [ 1477.357088] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.358437] env[62525]: DEBUG nova.network.neutron [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Successfully updated port: 1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1477.369324] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781267, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.379185] env[62525]: DEBUG nova.compute.manager [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1477.379185] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8feb127f-558a-4e1a-8242-04e750a5acb2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.425027] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.425777] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.047s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.428099] env[62525]: INFO nova.compute.claims [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1477.453785] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105443} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.455029] env[62525]: INFO nova.scheduler.client.report [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Deleted allocations for instance 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5 [ 1477.457264] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1477.461454] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08edc067-91e6-4c06-a611-69989d12840f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.489913] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 7a92bac8-9cee-41ed-81e3-08b48432fe7c/7a92bac8-9cee-41ed-81e3-08b48432fe7c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1477.491165] env[62525]: DEBUG nova.network.neutron [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Updated VIF entry in instance network info cache for port 626c2e84-d2dc-4c5b-81b4-0a937a7591c9. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1477.491541] env[62525]: DEBUG nova.network.neutron [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Updating instance_info_cache with network_info: [{"id": "626c2e84-d2dc-4c5b-81b4-0a937a7591c9", "address": "fa:16:3e:07:6d:9b", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap626c2e84-d2", "ovs_interfaceid": "626c2e84-d2dc-4c5b-81b4-0a937a7591c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.493342] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67d462ea-297e-41b5-9207-dd02de0bf73a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.509531] env[62525]: DEBUG oslo_concurrency.lockutils [req-f4101609-ece5-4781-870e-a7d8eab650fb req-89d11ded-241e-431d-b007-9c2813762059 service nova] Releasing lock "refresh_cache-70313696-a9cc-499c-b9e6-329a71c4b915" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.518097] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1477.518097] env[62525]: value = "task-1781270" [ 1477.518097] env[62525]: _type = "Task" [ 1477.518097] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.530133] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781270, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.688608] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781264, 'name': CreateVM_Task, 'duration_secs': 0.617849} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.689013] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1477.690123] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.690522] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.691149] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1477.691623] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58408359-4655-4c3f-9cb7-a05577aa2858 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.699237] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1477.699237] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f63245-faaa-89e9-a4d6-7fbd12cf9eb7" [ 1477.699237] env[62525]: _type = "Task" [ 1477.699237] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.713907] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f63245-faaa-89e9-a4d6-7fbd12cf9eb7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.847708] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.848144] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b42f6ebc-f551-4e67-a608-c62c91bbf161 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.866653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.866653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.866653] env[62525]: DEBUG nova.network.neutron [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.868400] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1477.868400] env[62525]: value = "task-1781271" [ 1477.868400] env[62525]: _type = "Task" [ 1477.868400] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.877867] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781267, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.887027] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781271, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.894114] env[62525]: INFO nova.compute.manager [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] instance snapshotting [ 1477.899546] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb23754-9191-488d-b45c-6579d52f326b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.928800] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d0d6c6-3d23-4a1a-8d9f-14f65c0242bd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.970049] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3de23dba-426f-4213-a4fa-31343f52cbc1 tempest-ServerShowV254Test-23581617 tempest-ServerShowV254Test-23581617-project-member] Lock "85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.503s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.032548] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781270, 'name': ReconfigVM_Task, 'duration_secs': 0.472988} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.032838] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 7a92bac8-9cee-41ed-81e3-08b48432fe7c/7a92bac8-9cee-41ed-81e3-08b48432fe7c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1478.033197] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62525) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1478.033890] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-3e1965d1-6e39-4214-acfe-37f673f9bab8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.045466] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1478.045466] env[62525]: value = "task-1781272" [ 1478.045466] env[62525]: _type = "Task" [ 1478.045466] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.056416] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781272, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.212082] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f63245-faaa-89e9-a4d6-7fbd12cf9eb7, 'name': SearchDatastore_Task, 'duration_secs': 0.085442} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.212617] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.212929] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1478.213244] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.213575] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.213689] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1478.213992] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-159f5425-397e-4fce-91d8-c2149f430d01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.233132] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 
tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1478.233410] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1478.234255] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98e5b344-4b5a-4e53-921f-1e6d507539b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.242860] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1478.242860] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5289de8a-b520-f07d-162d-33f43c5eec88" [ 1478.242860] env[62525]: _type = "Task" [ 1478.242860] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.254274] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5289de8a-b520-f07d-162d-33f43c5eec88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.372234] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781267, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555122} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.372430] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] d8c7d102-46e6-40fe-a864-a72590af4982/d8c7d102-46e6-40fe-a864-a72590af4982.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1478.372707] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1478.378331] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1422109a-bda0-4988-bd89-142e5f79373b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.389725] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781271, 'name': PowerOffVM_Task, 'duration_secs': 0.23701} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.392410] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.392410] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.392613] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1478.392613] env[62525]: value = "task-1781273" [ 1478.392613] env[62525]: _type = "Task" [ 1478.392613] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.396574] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a717c78c-4337-4ab2-9a2a-04903336859a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.411017] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781273, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.414011] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.414011] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc2f8c28-5df5-4357-b63d-d43496ebbd05 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.420307] env[62525]: DEBUG nova.network.neutron [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1478.448290] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1478.449623] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-756f463e-23a2-4db1-93a8-f1df15ff860f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.459632] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1478.459632] env[62525]: value = "task-1781275" [ 1478.459632] env[62525]: _type = "Task" [ 1478.459632] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.473724] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781275, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.559751] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.559751] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.559751] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleting the datastore file [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.559751] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4228744-32de-4c6c-be69-9e2cecaa5872 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.565577] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781272, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.076229} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.569297] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62525) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1478.570804] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e233603e-73ff-4b6d-ae90-37ffad851b4b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.575762] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1478.575762] env[62525]: value = "task-1781276" [ 1478.575762] env[62525]: _type = "Task" [ 1478.575762] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.600519] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 7a92bac8-9cee-41ed-81e3-08b48432fe7c/ephemeral_0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1478.604013] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-425d60b8-e85f-402e-b80f-ff3172779ca4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.621306] env[62525]: DEBUG nova.network.neutron [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updating instance_info_cache with network_info: [{"id": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "address": "fa:16:3e:a6:af:d9", "network": {"id": "bb870dec-79cf-4481-89a8-329b92ae55af", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2073315772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3263280a4a14e87ac174d07c5dcb443", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "537e0890-4fa2-4f2d-b74c-49933a4edf53", "external-id": "nsx-vlan-transportzone-82", "segmentation_id": 82, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac84b8a-0a", "ovs_interfaceid": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.625277] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781276, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.631801] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1478.631801] env[62525]: value = "task-1781277" [ 1478.631801] env[62525]: _type = "Task" [ 1478.631801] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.644877] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781277, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.658960] env[62525]: DEBUG nova.compute.manager [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Received event network-vif-plugged-1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.659900] env[62525]: DEBUG oslo_concurrency.lockutils [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] Acquiring lock "94560d78-071c-419d-ad10-f42a5b2271a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.659900] env[62525]: DEBUG oslo_concurrency.lockutils [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] Lock "94560d78-071c-419d-ad10-f42a5b2271a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.659900] env[62525]: DEBUG oslo_concurrency.lockutils [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] Lock "94560d78-071c-419d-ad10-f42a5b2271a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.659900] env[62525]: DEBUG nova.compute.manager [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] No waiting events found dispatching network-vif-plugged-1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1478.661806] env[62525]: WARNING nova.compute.manager [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Received unexpected event network-vif-plugged-1ac84b8a-0ab3-4332-910f-4710a6864b79 for instance with vm_state building and task_state spawning. [ 1478.661806] env[62525]: DEBUG nova.compute.manager [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Received event network-changed-1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.661806] env[62525]: DEBUG nova.compute.manager [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Refreshing instance network info cache due to event network-changed-1ac84b8a-0ab3-4332-910f-4710a6864b79. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1478.661806] env[62525]: DEBUG oslo_concurrency.lockutils [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] Acquiring lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.755900] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5289de8a-b520-f07d-162d-33f43c5eec88, 'name': SearchDatastore_Task, 'duration_secs': 0.023938} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.756731] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-296cb4f3-df71-4bdf-bebd-6d4da473c637 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.764220] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1478.764220] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fbf890-7183-42cc-a9a3-6675d000518f" [ 1478.764220] env[62525]: _type = "Task" [ 1478.764220] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.773134] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fbf890-7183-42cc-a9a3-6675d000518f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.918822] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781273, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.2035} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.918822] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1478.918822] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf39b6c6-deda-4637-9752-40eaeabf6403 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.944856] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] d8c7d102-46e6-40fe-a864-a72590af4982/d8c7d102-46e6-40fe-a864-a72590af4982.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1478.947774] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97b17a01-1541-496d-8149-1fa015a8db80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.978982] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781275, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.983823] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1478.983823] env[62525]: value = "task-1781278" [ 1478.983823] env[62525]: _type = "Task" [ 1478.983823] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.995112] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781278, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.999679] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c6f996-9743-4459-a6e4-0b6c3408bb93 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.008527] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5faaab2a-934c-4804-8c3a-adc43182bdb8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.042529] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1afda74-1101-4302-a623-3104031c534a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.051575] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfb084e-6150-4d7d-a772-e0f4114c4502 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.066502] env[62525]: DEBUG nova.compute.provider_tree [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1479.089585] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163455} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.090797] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.090797] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.090797] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.126890] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Releasing lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.126890] env[62525]: DEBUG nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Instance network_info: |[{"id": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "address": "fa:16:3e:a6:af:d9", "network": {"id": "bb870dec-79cf-4481-89a8-329b92ae55af", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2073315772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3263280a4a14e87ac174d07c5dcb443", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "537e0890-4fa2-4f2d-b74c-49933a4edf53", "external-id": "nsx-vlan-transportzone-82", "segmentation_id": 82, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac84b8a-0a", "ovs_interfaceid": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1479.126890] env[62525]: DEBUG oslo_concurrency.lockutils [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] Acquired lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.126890] env[62525]: DEBUG nova.network.neutron [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] 
[instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Refreshing network info cache for port 1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1479.128330] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:af:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '537e0890-4fa2-4f2d-b74c-49933a4edf53', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ac84b8a-0ab3-4332-910f-4710a6864b79', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1479.139866] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Creating folder: Project (c3263280a4a14e87ac174d07c5dcb443). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1479.140717] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3080a76d-d50a-4add-8072-9849349c44fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.157803] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781277, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.162791] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Created folder: Project (c3263280a4a14e87ac174d07c5dcb443) in parent group-v369553. [ 1479.162791] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Creating folder: Instances. Parent ref: group-v369701. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1479.162791] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abcb6762-9e46-4e5d-b188-e5cdf9aa6862 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.173727] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Created folder: Instances in parent group-v369701. [ 1479.173727] env[62525]: DEBUG oslo.service.loopingcall [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.173727] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1479.173727] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eda9bf97-5b6e-4fc3-b936-365a7351d4d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.199895] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1479.199895] env[62525]: value = "task-1781281" [ 1479.199895] env[62525]: _type = "Task" [ 1479.199895] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.209635] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781281, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.279558] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fbf890-7183-42cc-a9a3-6675d000518f, 'name': SearchDatastore_Task, 'duration_secs': 0.02143} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.280012] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.280082] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 70313696-a9cc-499c-b9e6-329a71c4b915/70313696-a9cc-499c-b9e6-329a71c4b915.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1479.280331] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aedc0c59-c44b-4a79-96f0-b05c47506857 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.290559] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1479.290559] env[62525]: value = "task-1781282" [ 1479.290559] env[62525]: _type = "Task" [ 1479.290559] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.301624] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781282, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.478070] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781275, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.500080] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781278, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.572628] env[62525]: DEBUG nova.scheduler.client.report [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1479.663996] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781277, 'name': ReconfigVM_Task, 'duration_secs': 0.741096} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.666369] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 7a92bac8-9cee-41ed-81e3-08b48432fe7c/ephemeral_0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1479.666369] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2e3e400-50fc-42ac-adae-993032f36be7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.677018] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1479.677018] env[62525]: value = "task-1781283" [ 1479.677018] env[62525]: _type = "Task" [ 1479.677018] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.696119] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781283, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.720140] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781281, 'name': CreateVM_Task, 'duration_secs': 0.452846} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.720960] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1479.721802] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.722120] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.722726] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.723131] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b18d2618-312c-40e5-afa2-e728dd31c29d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.729314] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1479.729314] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52551291-09c9-0763-bfa7-f867693f1bf3" [ 1479.729314] env[62525]: _type = "Task" [ 1479.729314] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.738666] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52551291-09c9-0763-bfa7-f867693f1bf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.808707] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781282, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.917837] env[62525]: DEBUG nova.network.neutron [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updated VIF entry in instance network info cache for port 1ac84b8a-0ab3-4332-910f-4710a6864b79. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.918280] env[62525]: DEBUG nova.network.neutron [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updating instance_info_cache with network_info: [{"id": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "address": "fa:16:3e:a6:af:d9", "network": {"id": "bb870dec-79cf-4481-89a8-329b92ae55af", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2073315772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3263280a4a14e87ac174d07c5dcb443", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "537e0890-4fa2-4f2d-b74c-49933a4edf53", "external-id": "nsx-vlan-transportzone-82", "segmentation_id": 82, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac84b8a-0a", "ovs_interfaceid": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.978579] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781275, 'name': CreateSnapshot_Task, 'duration_secs': 1.151607} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.979052] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1479.979744] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132c0573-1daf-4e0d-9132-fac1a66a9203 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.006656] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781278, 'name': ReconfigVM_Task, 'duration_secs': 0.597046} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.006932] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Reconfigured VM instance instance-00000034 to attach disk [datastore1] d8c7d102-46e6-40fe-a864-a72590af4982/d8c7d102-46e6-40fe-a864-a72590af4982.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1480.008028] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcab2f62-cdb8-4823-ac67-f2fbff5391d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.015484] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1480.015484] env[62525]: value = "task-1781284" [ 1480.015484] env[62525]: _type = "Task" [ 1480.015484] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.031020] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781284, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.080467] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.081030] env[62525]: DEBUG nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1480.084140] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.932s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.084386] env[62525]: DEBUG nova.objects.instance [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lazy-loading 'resources' on Instance uuid c7603ce8-8471-4813-9faf-3667a205893c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1480.141516] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1480.141854] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1480.142077] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1480.142307] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1480.142542] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1480.142725] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1480.143035] env[62525]: DEBUG 
nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1480.143252] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1480.143465] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1480.143683] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1480.143934] env[62525]: DEBUG nova.virt.hardware [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1480.145295] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e35dfb3-4396-4cf8-be91-4be6c60c606d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.156968] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e4ea68-5313-4991-9781-e6f27cb01b80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.178172] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:64:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44ff1acd-1593-43a1-95fd-aceba913d7d5', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1480.187788] env[62525]: DEBUG oslo.service.loopingcall [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.192365] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1480.193517] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0387370e-1110-4d04-aa01-980ff231c34f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.226303] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781283, 'name': Rename_Task, 'duration_secs': 0.243444} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.227040] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1480.227040] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-151c0817-afa2-4d8e-b369-82c39597ecda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.230659] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1480.230659] env[62525]: value = "task-1781285" [ 1480.230659] env[62525]: _type = "Task" [ 1480.230659] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.241690] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1480.241690] env[62525]: value = "task-1781286" [ 1480.241690] env[62525]: _type = "Task" [ 1480.241690] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.256314] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52551291-09c9-0763-bfa7-f867693f1bf3, 'name': SearchDatastore_Task, 'duration_secs': 0.027666} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.266287] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.266287] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1480.266287] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.266287] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.266287] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1480.266287] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.266287] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781285, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.266287] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-994ab61d-230a-4dd2-857e-621382ffcbfd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.282504] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1480.282504] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1480.283690] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17cb0543-7805-4db6-af6e-6be961e38626 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.299727] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1480.299727] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527ef288-420c-b7fa-eeea-4d889dfec824" [ 1480.299727] env[62525]: _type = "Task" [ 1480.299727] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.313425] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781282, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.986889} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.316871] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 70313696-a9cc-499c-b9e6-329a71c4b915/70313696-a9cc-499c-b9e6-329a71c4b915.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1480.317134] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1480.317426] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ef288-420c-b7fa-eeea-4d889dfec824, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.317672] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5053facc-5c2c-4645-9d5a-ffffe51c1be8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.326865] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1480.326865] env[62525]: value = "task-1781287" [ 1480.326865] env[62525]: _type = "Task" [ 1480.326865] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.339726] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781287, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.421445] env[62525]: DEBUG oslo_concurrency.lockutils [req-a25c6920-ed20-4d95-9e8a-b78ee1e4018d req-5c750e0e-7e48-40e6-9926-f87f29059d0d service nova] Releasing lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.506514] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1480.508077] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f31b4ca2-ce54-463d-8ddc-8149a926efff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.516745] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1480.516745] env[62525]: value = "task-1781289" [ 1480.516745] env[62525]: _type = "Task" [ 1480.516745] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.530598] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781284, 'name': Rename_Task, 'duration_secs': 0.316136} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.534022] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1480.534314] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781289, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.534571] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c14b575b-4699-4c76-bf0f-7079cabbc622 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.540889] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1480.540889] env[62525]: value = "task-1781290" [ 1480.540889] env[62525]: _type = "Task" [ 1480.540889] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.550771] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.592815] env[62525]: DEBUG nova.compute.utils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1480.598314] env[62525]: DEBUG nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1480.598529] env[62525]: DEBUG nova.network.neutron [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1480.647720] env[62525]: DEBUG nova.policy [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19f9c6e6a89841c5b954d33d86c15b5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '099851107d594ed39cef954e6e6e87b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1480.758961] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781286, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.759293] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781285, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.816882] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ef288-420c-b7fa-eeea-4d889dfec824, 'name': SearchDatastore_Task, 'duration_secs': 0.018928} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.817822] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8f62318-5c13-4900-a8cb-e0c843a75319 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.826152] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1480.826152] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d1236e-0a36-420d-58f2-03ed139d9f92" [ 1480.826152] env[62525]: _type = "Task" [ 1480.826152] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.838368] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d1236e-0a36-420d-58f2-03ed139d9f92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.841486] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13103} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.841773] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1480.842599] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f972c85a-cbbe-4fd6-b01a-898caf85f4a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.873999] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 70313696-a9cc-499c-b9e6-329a71c4b915/70313696-a9cc-499c-b9e6-329a71c4b915.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1480.877983] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1678693e-3522-4718-9a19-0bd4cf9bac9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.901906] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1480.901906] env[62525]: value = "task-1781291" [ 
1480.901906] env[62525]: _type = "Task" [ 1480.901906] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.917726] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781291, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.030550] env[62525]: DEBUG nova.network.neutron [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Successfully created port: 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1481.042492] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781289, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.057695] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781290, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.102732] env[62525]: DEBUG nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1481.158592] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279cd104-4787-4f87-a6f6-cb05346bb108 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.168563] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835542a9-1b20-4ec2-8b7a-a8515e1dfa20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.205457] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973a2ccc-6f1f-47f1-b5b8-074d79953703 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.214398] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b79ec5-cb21-4a1a-b02b-b356332eefb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.230989] env[62525]: DEBUG nova.compute.provider_tree [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1481.254934] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781285, 'name': CreateVM_Task, 'duration_secs': 0.539827} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.259686] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1481.259686] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781286, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.259686] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.259686] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.259944] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1481.260385] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b1a916e-8428-4c18-bd4c-1b498ad4c465 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.268617] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1481.268617] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52002a46-eabb-71e1-665e-2f1f2a5d33f7" [ 1481.268617] env[62525]: _type = "Task" [ 1481.268617] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.279058] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52002a46-eabb-71e1-665e-2f1f2a5d33f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.337340] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d1236e-0a36-420d-58f2-03ed139d9f92, 'name': SearchDatastore_Task, 'duration_secs': 0.030058} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.337706] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.337884] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 94560d78-071c-419d-ad10-f42a5b2271a8/94560d78-071c-419d-ad10-f42a5b2271a8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1481.338160] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe78bd18-934c-4714-961d-fc2ba3c85bb5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.348577] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1481.348577] env[62525]: value = "task-1781292" [ 1481.348577] env[62525]: _type = "Task" [ 1481.348577] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.357875] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.414952] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781291, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.534606] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781289, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.544100] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "dfa4b57e-6219-42eb-b257-263124f9a980" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.544100] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "dfa4b57e-6219-42eb-b257-263124f9a980" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.558661] env[62525]: DEBUG oslo_vmware.api [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781290, 'name': PowerOnVM_Task, 'duration_secs': 0.998988} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.558661] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1481.559384] env[62525]: INFO nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Took 10.49 seconds to spawn the instance on the hypervisor. 
[ 1481.560032] env[62525]: DEBUG nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1481.560893] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cc74cf-89e6-44ac-8f66-d110f83d1326 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.734949] env[62525]: DEBUG nova.scheduler.client.report [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1481.755087] env[62525]: DEBUG oslo_vmware.api [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781286, 'name': PowerOnVM_Task, 'duration_secs': 1.430623} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.756162] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1481.756440] env[62525]: INFO nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Took 12.95 seconds to spawn the instance on the hypervisor. [ 1481.756638] env[62525]: DEBUG nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1481.757656] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84633a4-c1fe-4010-9b85-25ff78aa06b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.781201] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52002a46-eabb-71e1-665e-2f1f2a5d33f7, 'name': SearchDatastore_Task, 'duration_secs': 0.010083} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.781653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.781835] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1481.782256] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.782414] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.782667] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.783030] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d648fcc-f534-4d5e-8d19-3271a82ae3a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.798685] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.798842] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1481.799790] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04bbc8cc-7b41-49d5-b362-9d9bb233bd1a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.807776] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1481.807776] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527e2c3a-1663-f841-3f80-4825150acd53" [ 1481.807776] env[62525]: _type = "Task" [ 1481.807776] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.821027] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527e2c3a-1663-f841-3f80-4825150acd53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.860736] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781292, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.914150] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781291, 'name': ReconfigVM_Task, 'duration_secs': 0.582684} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.914326] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 70313696-a9cc-499c-b9e6-329a71c4b915/70313696-a9cc-499c-b9e6-329a71c4b915.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1481.914829] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40b6f5f3-3eba-4393-898a-84abc88a0664 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.921335] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1481.921335] env[62525]: value = "task-1781293" [ 1481.921335] env[62525]: _type = "Task" [ 1481.921335] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.930869] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781293, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.035920] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781289, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.054022] env[62525]: DEBUG nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1482.083764] env[62525]: INFO nova.compute.manager [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Took 43.40 seconds to build instance. [ 1482.117307] env[62525]: DEBUG nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1482.148175] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1482.148175] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1482.148344] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1482.148584] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1482.148777] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1482.148970] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1482.149227] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1482.149428] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1482.150233] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1482.150429] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1482.150662] env[62525]: DEBUG nova.virt.hardware [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1482.152031] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8196d6-6f5b-4a1e-a083-5ebee4bee2ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.160395] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b26610-5587-4da1-b122-fd2e4e085f71 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.240471] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.246020] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.598s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.246020] env[62525]: INFO nova.compute.claims [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1482.269216] env[62525]: INFO nova.scheduler.client.report [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Deleted allocations for instance c7603ce8-8471-4813-9faf-3667a205893c [ 1482.281079] env[62525]: INFO nova.compute.manager [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Took 46.38 seconds to build instance. 
[ 1482.324266] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527e2c3a-1663-f841-3f80-4825150acd53, 'name': SearchDatastore_Task, 'duration_secs': 0.059079} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.325186] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ade47a51-0531-4b90-9c3a-3c1485841ec6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.333228] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1482.333228] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529b0405-c786-766a-0045-e94f578650f5" [ 1482.333228] env[62525]: _type = "Task" [ 1482.333228] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.345306] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529b0405-c786-766a-0045-e94f578650f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.359533] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555477} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.359879] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 94560d78-071c-419d-ad10-f42a5b2271a8/94560d78-071c-419d-ad10-f42a5b2271a8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1482.360107] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1482.360382] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01392972-ad20-49d3-85d7-600afc11c3da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.368246] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1482.368246] env[62525]: value = "task-1781294" [ 1482.368246] env[62525]: _type = "Task" [ 1482.368246] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.377063] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.433786] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781293, 'name': Rename_Task, 'duration_secs': 0.39578} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.433786] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1482.433786] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81b231d4-82d2-4e65-a94a-93e3d09c26b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.442031] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1482.442031] env[62525]: value = "task-1781295" [ 1482.442031] env[62525]: _type = "Task" [ 1482.442031] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.451678] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.535761] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781289, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.544625] env[62525]: DEBUG nova.compute.manager [req-f6841bf8-e896-4e75-ae51-3368a9bf6d27 req-e5eacfe3-e7bd-4293-9b42-de337a73b6b7 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Received event network-vif-plugged-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1482.544820] env[62525]: DEBUG oslo_concurrency.lockutils [req-f6841bf8-e896-4e75-ae51-3368a9bf6d27 req-e5eacfe3-e7bd-4293-9b42-de337a73b6b7 service nova] Acquiring lock "0067de08-6708-4c7c-a83a-ed9df193d5cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.545111] env[62525]: DEBUG oslo_concurrency.lockutils [req-f6841bf8-e896-4e75-ae51-3368a9bf6d27 req-e5eacfe3-e7bd-4293-9b42-de337a73b6b7 service nova] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.545337] env[62525]: DEBUG oslo_concurrency.lockutils [req-f6841bf8-e896-4e75-ae51-3368a9bf6d27 req-e5eacfe3-e7bd-4293-9b42-de337a73b6b7 service nova] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.545559] env[62525]: DEBUG nova.compute.manager [req-f6841bf8-e896-4e75-ae51-3368a9bf6d27 req-e5eacfe3-e7bd-4293-9b42-de337a73b6b7 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] No waiting events found dispatching network-vif-plugged-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1482.545775] env[62525]: WARNING nova.compute.manager [req-f6841bf8-e896-4e75-ae51-3368a9bf6d27 req-e5eacfe3-e7bd-4293-9b42-de337a73b6b7 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Received unexpected event network-vif-plugged-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 for instance with vm_state building and task_state spawning. 
[ 1482.574512] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.585721] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d917d1-46cd-4606-bfd5-d3dcbf750932 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "d8c7d102-46e6-40fe-a864-a72590af4982" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.817s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.707226] env[62525]: DEBUG nova.network.neutron [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Successfully updated port: 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1482.786097] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e98bff26-b890-449e-a6d9-782249a2e488 tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.109s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.787354] env[62525]: DEBUG oslo_concurrency.lockutils [None req-90ffa6b3-cbb8-4da6-8593-ae4ece564533 tempest-ServersTestManualDisk-1807868986 tempest-ServersTestManualDisk-1807868986-project-member] Lock "c7603ce8-8471-4813-9faf-3667a205893c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.952s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.846007] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529b0405-c786-766a-0045-e94f578650f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010718} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.846293] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.846551] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1482.846871] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ec443c3-9b83-43dc-85bd-2cd93d499c6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.856196] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1482.856196] env[62525]: value = "task-1781297" [ 1482.856196] env[62525]: _type = "Task" [ 1482.856196] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.865424] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781297, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.879921] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.954448] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781295, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.041042] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781289, 'name': CloneVM_Task, 'duration_secs': 2.473442} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.041042] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Created linked-clone VM from snapshot [ 1483.041640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8982085-0c8f-4a5f-9aec-6ace14b2d5fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.051439] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Uploading image 07fe9f47-d2af-46d3-8aa3-aba041f431cd {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1483.088409] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1483.088409] env[62525]: value = "vm-369706" [ 1483.088409] env[62525]: _type = "VirtualMachine" [ 1483.088409] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1483.089291] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e4d424f3-8793-44de-abf2-ef095a53a070 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.098226] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lease: (returnval){ [ 1483.098226] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f10d3d-6e63-4e21-fbc9-9d104f07b3ae" [ 1483.098226] env[62525]: _type = "HttpNfcLease" [ 1483.098226] env[62525]: } obtained for exporting VM: (result){ [ 1483.098226] env[62525]: value = "vm-369706" [ 1483.098226] env[62525]: _type = "VirtualMachine" [ 1483.098226] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1483.098527] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the lease: (returnval){ [ 1483.098527] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f10d3d-6e63-4e21-fbc9-9d104f07b3ae" [ 1483.098527] env[62525]: _type = "HttpNfcLease" [ 1483.098527] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1483.106053] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1483.106053] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f10d3d-6e63-4e21-fbc9-9d104f07b3ae" [ 1483.106053] env[62525]: _type = "HttpNfcLease" [ 1483.106053] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1483.210573] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.210751] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.210939] env[62525]: DEBUG nova.network.neutron [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1483.370408] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781297, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.382622] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781294, 'name': ExtendVirtualDisk_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.436036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "d8c7d102-46e6-40fe-a864-a72590af4982" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.436036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "d8c7d102-46e6-40fe-a864-a72590af4982" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.436036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "d8c7d102-46e6-40fe-a864-a72590af4982-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.436036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "d8c7d102-46e6-40fe-a864-a72590af4982-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.436036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "d8c7d102-46e6-40fe-a864-a72590af4982-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.438290] env[62525]: INFO nova.compute.manager [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Terminating instance [ 1483.440862] env[62525]: DEBUG nova.compute.manager [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1483.441072] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1483.441995] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389a0d2d-94f0-4e6b-bec1-15c0207d0716 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.458247] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781295, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.463077] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1483.463581] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43ad8331-e5dd-40d6-aa81-350c74d13d95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.472202] env[62525]: DEBUG oslo_vmware.api [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1483.472202] env[62525]: value = "task-1781299" [ 1483.472202] env[62525]: _type = "Task" [ 1483.472202] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.486298] env[62525]: DEBUG oslo_vmware.api [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781299, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.611279] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1483.611279] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f10d3d-6e63-4e21-fbc9-9d104f07b3ae" [ 1483.611279] env[62525]: _type = "HttpNfcLease" [ 1483.611279] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1483.611851] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1483.611851] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f10d3d-6e63-4e21-fbc9-9d104f07b3ae" [ 1483.611851] env[62525]: _type = "HttpNfcLease" [ 1483.611851] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1483.612643] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937449a8-ec30-4e99-92b4-acb7a3bfe976 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.624230] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52755afb-d4b7-107e-d0a6-b55c119d1537/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1483.624230] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52755afb-d4b7-107e-d0a6-b55c119d1537/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1483.758685] env[62525]: DEBUG nova.network.neutron [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1483.766742] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fd6ac47d-4713-4112-bc2b-3162c8756371 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.781219] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb3b088-a074-4ef5-b6b5-c4fd824c855e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.794735] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac8f59a-11b5-47de-9b6e-2e9e9da0beeb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.851256] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5a6983-b680-4bb6-b06e-89752a65a3a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.867649] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89bd854-929a-4fa6-8af6-4907a0c8f515 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.881737] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781297, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.930616} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.892292] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1483.892524] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1483.893056] env[62525]: DEBUG nova.compute.provider_tree [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.897397] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9cf6de4-00fb-4dfb-912f-d14eb12991a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.900781] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.017031} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.900781] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1483.901875] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e1e8f3-65d8-467a-b3d3-67d1c6932483 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.907409] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1483.907409] env[62525]: value = "task-1781300" [ 1483.907409] env[62525]: _type = "Task" [ 1483.907409] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.925863] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 94560d78-071c-419d-ad10-f42a5b2271a8/94560d78-071c-419d-ad10-f42a5b2271a8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1483.929863] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f852b1c7-ac01-4e66-829a-bb94e1910fb5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.958103] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781300, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.960069] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1483.960069] env[62525]: value = "task-1781301" [ 1483.960069] env[62525]: _type = "Task" [ 1483.960069] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.967193] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781295, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.973522] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781301, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.983091] env[62525]: DEBUG oslo_vmware.api [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781299, 'name': PowerOffVM_Task, 'duration_secs': 0.318054} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.983901] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1483.983901] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1483.983901] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b2077b4-2290-420f-93b7-a94d71769eba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.991845] env[62525]: DEBUG nova.network.neutron [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.085092] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1484.085382] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1484.085600] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5158c124-872b-48b5-8a2c-c48f6a754551 
tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleting the datastore file [datastore1] d8c7d102-46e6-40fe-a864-a72590af4982 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1484.085883] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26de1aaf-dff2-4cbf-b2b8-5661945a2a21 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.094210] env[62525]: DEBUG oslo_vmware.api [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1484.094210] env[62525]: value = "task-1781303" [ 1484.094210] env[62525]: _type = "Task" [ 1484.094210] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.105060] env[62525]: DEBUG oslo_vmware.api [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781303, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.402088] env[62525]: DEBUG nova.scheduler.client.report [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1484.431891] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781300, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103656} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.440196] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1484.440196] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d886ccc-2324-4be1-a471-e8d015f12359 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.470188] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1484.474922] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fa3daf3-b231-4f2b-b2ed-f31b6d881f21 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.496855] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.497273] env[62525]: DEBUG nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Instance network_info: |[{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1484.501437] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None 
req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:62:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1484.509387] env[62525]: DEBUG oslo.service.loopingcall [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1484.510248] env[62525]: DEBUG oslo_vmware.api [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781295, 'name': PowerOnVM_Task, 'duration_secs': 1.628922} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.511826] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1484.512297] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1484.512665] env[62525]: INFO nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Took 10.89 seconds to spawn the instance on the hypervisor. [ 1484.513008] env[62525]: DEBUG nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1484.513506] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1484.513506] env[62525]: value = "task-1781304" [ 1484.513506] env[62525]: _type = "Task" [ 1484.513506] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.517515] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e02886c-330f-4fee-82f1-b8f19763891e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.535235] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28c20a4-af3d-4898-bd37-99533852a8aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.539110] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.560454] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781304, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.560454] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1484.560454] env[62525]: value = "task-1781305" [ 1484.560454] env[62525]: _type = "Task" [ 1484.560454] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.570626] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781305, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.606098] env[62525]: DEBUG oslo_vmware.api [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781303, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343325} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.606385] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1484.606724] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1484.606992] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1484.607295] env[62525]: INFO nova.compute.manager [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1484.607722] env[62525]: DEBUG oslo.service.loopingcall [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1484.608036] env[62525]: DEBUG nova.compute.manager [-] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1484.608136] env[62525]: DEBUG nova.network.neutron [-] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1484.790527] env[62525]: DEBUG nova.compute.manager [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Received event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1484.791016] env[62525]: DEBUG nova.compute.manager [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing instance network info cache due to event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1484.792240] env[62525]: DEBUG oslo_concurrency.lockutils [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] Acquiring lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.792652] env[62525]: DEBUG oslo_concurrency.lockutils [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] Acquired lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.794714] env[62525]: DEBUG nova.network.neutron [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1484.910801] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.911484] env[62525]: DEBUG nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1484.923023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.876s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.923023] env[62525]: DEBUG nova.objects.instance [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lazy-loading 'resources' on Instance uuid 1fe967d9-351a-4b44-b7cb-d3c8395d9516 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1484.995302] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781301, 'name': ReconfigVM_Task, 'duration_secs': 0.658785} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.995302] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 94560d78-071c-419d-ad10-f42a5b2271a8/94560d78-071c-419d-ad10-f42a5b2271a8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.996090] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3fb361c-8b47-4d73-84eb-2b8062f34788 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.010890] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1485.010890] env[62525]: value = "task-1781307" [ 1485.010890] env[62525]: _type = "Task" [ 1485.010890] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.025173] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781307, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.053692] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781304, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.077631] env[62525]: INFO nova.compute.manager [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Took 42.08 seconds to build instance. [ 1485.086353] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781305, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.424453] env[62525]: DEBUG nova.compute.utils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1485.438677] env[62525]: DEBUG nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1485.438876] env[62525]: DEBUG nova.network.neutron [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1485.441584] env[62525]: DEBUG nova.network.neutron [-] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.523379] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781307, 'name': Rename_Task, 'duration_secs': 0.355459} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.523665] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.523917] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8e0ea52-e3a4-4c7f-ad82-809a3534a2b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.534022] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1485.534022] env[62525]: value = "task-1781308" [ 1485.534022] env[62525]: _type = "Task" [ 1485.534022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.544197] env[62525]: DEBUG nova.policy [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5d4daa067cea43f9b123ac2201c52a49', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '652aaa481e9f48c78070f1b0ffc4cadc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1485.552022] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781308, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.563898] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781304, 'name': ReconfigVM_Task, 'duration_secs': 0.603491} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.567929] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1485.567929] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6a8d07f-fea9-4831-b190-e4ac0fd743b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.583937] env[62525]: DEBUG oslo_concurrency.lockutils [None req-db419eb0-0255-4ab7-a6e7-6a67a7234f56 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "70313696-a9cc-499c-b9e6-329a71c4b915" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.940s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.583937] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781305, 'name': CreateVM_Task, 'duration_secs': 0.59466} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.583937] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1485.583937] env[62525]: value = "task-1781309" [ 1485.583937] env[62525]: _type = "Task" [ 1485.583937] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.584314] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1485.585032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.585032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.585511] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1485.588592] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-897a2817-1b99-4e09-b562-d517bdea58f8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.600826] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1485.600826] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52820ce4-7e5c-8cda-0af4-45573372b4f6" [ 1485.600826] env[62525]: _type = "Task" [ 1485.600826] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.601118] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781309, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.616548] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52820ce4-7e5c-8cda-0af4-45573372b4f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.648339] env[62525]: DEBUG nova.network.neutron [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updated VIF entry in instance network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1485.648999] env[62525]: DEBUG nova.network.neutron [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.939964] env[62525]: DEBUG nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1485.945339] env[62525]: INFO nova.compute.manager [-] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Took 1.34 seconds to deallocate network for instance. 
[ 1485.968026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1d66ef-198b-4dcb-8ad4-6511f35c09ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.971055] env[62525]: DEBUG nova.network.neutron [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Successfully created port: 2e2a233e-2687-4af9-9cde-b3ad26dc9a37 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1485.979145] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fc833f-d679-4eea-89f3-f7b7cb29129d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.023969] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff3828f-c2b4-40c3-a0a8-6d6903f11834 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.038881] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf941b8-607d-4c7b-abfb-7f89154eb37a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.060108] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781308, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.061010] env[62525]: DEBUG nova.compute.provider_tree [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1486.096962] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781309, 'name': Rename_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.114266] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52820ce4-7e5c-8cda-0af4-45573372b4f6, 'name': SearchDatastore_Task, 'duration_secs': 0.021486} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.114266] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.114266] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1486.114521] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.114743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.114907] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1486.115207] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a845f0c2-7f3d-4ec2-9cdd-e377096c08a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.125263] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1486.125450] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1486.126263] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-688c3e0a-7e37-43ff-be30-7f9682f047cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.134095] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1486.134095] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5212a243-63b3-6d3c-4c82-ea67f7b5b908" [ 1486.134095] env[62525]: _type = "Task" [ 1486.134095] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.148334] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5212a243-63b3-6d3c-4c82-ea67f7b5b908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.155343] env[62525]: DEBUG oslo_concurrency.lockutils [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] Releasing lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.155772] env[62525]: DEBUG nova.compute.manager [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Received event network-changed-e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1486.156053] env[62525]: DEBUG nova.compute.manager [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Refreshing instance network info cache due to event network-changed-e95ca310-933c-4095-a25b-170fc26750e5. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1486.156708] env[62525]: DEBUG oslo_concurrency.lockutils [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] Acquiring lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.156708] env[62525]: DEBUG oslo_concurrency.lockutils [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] Acquired lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.156924] env[62525]: DEBUG nova.network.neutron [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Refreshing network info cache for port e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.451147] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.545409] env[62525]: DEBUG oslo_vmware.api [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781308, 'name': PowerOnVM_Task, 'duration_secs': 0.800216} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.545840] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.546139] env[62525]: INFO nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Took 10.26 seconds to spawn the instance on the hypervisor. 
[ 1486.546309] env[62525]: DEBUG nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1486.547227] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1df40b3-05fe-4400-97ac-0e493dcb221c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.564482] env[62525]: DEBUG nova.scheduler.client.report [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1486.595711] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781309, 'name': Rename_Task, 'duration_secs': 0.691705} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.598070] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1486.598616] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e698b90d-4b62-42f4-a44a-47eeb3ec5d9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.607764] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1486.607764] env[62525]: value = "task-1781310" [ 1486.607764] env[62525]: _type = "Task" [ 1486.607764] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.618952] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781310, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.647524] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5212a243-63b3-6d3c-4c82-ea67f7b5b908, 'name': SearchDatastore_Task, 'duration_secs': 0.015214} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.648460] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fde2b52e-22b7-4ec1-894c-a4e8b20b6d29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.656120] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1486.656120] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5288a9e5-3c69-ec02-587f-5770f474c607" [ 1486.656120] env[62525]: _type = "Task" [ 1486.656120] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.668738] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5288a9e5-3c69-ec02-587f-5770f474c607, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.897325] env[62525]: DEBUG nova.compute.manager [req-8b33d100-19eb-4128-b61e-72aa6f916943 req-8461f6ff-869e-4792-ba47-18d99b6814db service nova] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Received event network-vif-deleted-214d68cf-ce48-4bf5-b2e5-94a988013295 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1486.953584] env[62525]: DEBUG nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1486.981639] env[62525]: DEBUG nova.network.neutron [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Updated VIF entry in instance network info cache for port e95ca310-933c-4095-a25b-170fc26750e5. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1486.982078] env[62525]: DEBUG nova.network.neutron [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Updating instance_info_cache with network_info: [{"id": "e95ca310-933c-4095-a25b-170fc26750e5", "address": "fa:16:3e:0b:d7:5e", "network": {"id": "c6290db3-9e88-44af-94ba-39935fba1aee", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587456151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cae5d0f44332499ab2dbd7a69fc0aff2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape95ca310-93", "ovs_interfaceid": "e95ca310-933c-4095-a25b-170fc26750e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.990293] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1486.990623] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1486.990803] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1486.991016] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 
tempest-ServerPasswordTestJSON-420300715-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1486.991243] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1486.991428] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1486.991687] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1486.991908] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1486.992125] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1486.992317] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1486.992558] env[62525]: DEBUG nova.virt.hardware [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1486.993524] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0320248-3556-4414-8825-3fddf9328d43 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.003112] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c4c1f8-566f-4c84-a9c5-6dc344d2999c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.069132] env[62525]: INFO nova.compute.manager [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Took 38.04 seconds to build instance. 
[ 1487.072031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.075172] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.462s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.075685] env[62525]: DEBUG nova.objects.instance [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lazy-loading 'resources' on Instance uuid 10f10329-9a7d-4e1b-8fb4-90350169e518 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1487.114574] env[62525]: INFO nova.scheduler.client.report [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted allocations for instance 1fe967d9-351a-4b44-b7cb-d3c8395d9516 [ 1487.124504] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781310, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.173997] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5288a9e5-3c69-ec02-587f-5770f474c607, 'name': SearchDatastore_Task, 'duration_secs': 0.01688} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.174322] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.174622] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/0067de08-6708-4c7c-a83a-ed9df193d5cd.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1487.174897] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e32d450-f9ae-4cae-b4ba-ef554b9bfd4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.185344] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1487.185344] env[62525]: value = "task-1781312" [ 1487.185344] env[62525]: _type = "Task" [ 1487.185344] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.197042] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781312, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.486224] env[62525]: DEBUG oslo_concurrency.lockutils [req-52698d57-c962-4cbb-803d-9809ada0d933 req-c8f950b5-ae9a-4bf0-9639-7b01d1183a21 service nova] Releasing lock "refresh_cache-7a92bac8-9cee-41ed-81e3-08b48432fe7c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.572310] env[62525]: DEBUG oslo_concurrency.lockutils [None req-70cc4472-2afe-4e37-a74b-f78539603085 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.519s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.588123] env[62525]: DEBUG nova.compute.manager [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1487.589201] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7dbb1a-c948-4514-8458-05d23bc7c4aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.621494] env[62525]: DEBUG oslo_vmware.api [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781310, 'name': PowerOnVM_Task, 'duration_secs': 0.699608} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.621882] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1487.622274] env[62525]: DEBUG nova.compute.manager [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1487.626277] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988b9c54-ea02-422a-9a83-9dfc10ed176e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.633930] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e52405b-f309-457e-b2cf-b1892b84a159 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "1fe967d9-351a-4b44-b7cb-d3c8395d9516" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.593s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.704080] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781312, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.708407] env[62525]: DEBUG nova.network.neutron [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Successfully updated port: 2e2a233e-2687-4af9-9cde-b3ad26dc9a37 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1488.053299] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22861336-3cf6-44dd-8750-1aada6dd731b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.062902] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc8c3e8-72f9-4106-898c-dec6aba3db7a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.102610] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b88b3be-4263-45e8-8422-f22190382e5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.106779] env[62525]: INFO nova.compute.manager [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] instance snapshotting [ 1488.112590] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4537e1a5-5d79-415a-b3a7-deed76c0c006 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.117954] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96626dc-e36e-45cb-aa36-d624a1fc44d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.133216] env[62525]: DEBUG nova.compute.provider_tree [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.153968] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da371ec-02de-4b78-97f2-2e5f3f630c57 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.160019] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.198701] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631608} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.199008] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/0067de08-6708-4c7c-a83a-ed9df193d5cd.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1488.199246] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1488.199514] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfd17429-a612-49e9-9a0a-8b06bb19e369 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.210243] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1488.210243] env[62525]: value = "task-1781313" [ 1488.210243] env[62525]: _type = "Task" [ 1488.210243] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.214373] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "refresh_cache-1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.214544] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquired lock "refresh_cache-1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.215012] env[62525]: DEBUG nova.network.neutron [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1488.223108] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781313, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.547124] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.547932] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.548238] env[62525]: INFO nova.compute.manager [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Rebooting instance [ 1488.653703] env[62525]: DEBUG nova.scheduler.client.report [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1488.668721] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1488.671016] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d6581d4e-9802-4b9e-832b-c7f75965048d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.680825] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1488.680825] env[62525]: value = "task-1781314" [ 1488.680825] env[62525]: _type = "Task" [ 1488.680825] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.691980] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781314, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.723430] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077282} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.723713] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1488.724886] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80fbdcc-a1b6-4e8a-a8b6-904a2c4627ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.749355] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/0067de08-6708-4c7c-a83a-ed9df193d5cd.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.750149] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3015970d-d0bc-41db-82dd-340a60bcab4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.765930] env[62525]: DEBUG nova.network.neutron [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1488.774777] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1488.774777] env[62525]: value = "task-1781316" [ 1488.774777] env[62525]: _type = "Task" [ 1488.774777] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.788695] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781316, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.985361] env[62525]: DEBUG nova.network.neutron [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Updating instance_info_cache with network_info: [{"id": "2e2a233e-2687-4af9-9cde-b3ad26dc9a37", "address": "fa:16:3e:68:56:f3", "network": {"id": "fc54cea9-53af-4fab-abe6-0406820aeda6", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1381750944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "652aaa481e9f48c78070f1b0ffc4cadc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2a233e-26", "ovs_interfaceid": "2e2a233e-2687-4af9-9cde-b3ad26dc9a37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.080190] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.080190] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.080190] env[62525]: DEBUG nova.network.neutron [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1489.159077] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.163425] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 
27.149s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.163805] env[62525]: DEBUG nova.objects.instance [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lazy-loading 'resources' on Instance uuid d2e7c558-02af-477c-b996-239ef14ed75b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1489.199166] env[62525]: DEBUG nova.compute.manager [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Received event network-vif-plugged-2e2a233e-2687-4af9-9cde-b3ad26dc9a37 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.200178] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Acquiring lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.200178] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.200178] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.200725] env[62525]: DEBUG nova.compute.manager [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] No waiting events found dispatching network-vif-plugged-2e2a233e-2687-4af9-9cde-b3ad26dc9a37 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1489.200725] env[62525]: WARNING nova.compute.manager [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Received unexpected event network-vif-plugged-2e2a233e-2687-4af9-9cde-b3ad26dc9a37 for instance with vm_state building and task_state spawning. [ 1489.200935] env[62525]: DEBUG nova.compute.manager [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Received event network-changed-2e2a233e-2687-4af9-9cde-b3ad26dc9a37 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.201203] env[62525]: DEBUG nova.compute.manager [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Refreshing instance network info cache due to event network-changed-2e2a233e-2687-4af9-9cde-b3ad26dc9a37. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1489.202325] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Acquiring lock "refresh_cache-1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.208346] env[62525]: INFO nova.scheduler.client.report [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Deleted allocations for instance 10f10329-9a7d-4e1b-8fb4-90350169e518 [ 1489.211444] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781314, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.298148] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781316, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.489726] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Releasing lock "refresh_cache-1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.493094] env[62525]: DEBUG nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Instance network_info: |[{"id": "2e2a233e-2687-4af9-9cde-b3ad26dc9a37", "address": "fa:16:3e:68:56:f3", "network": {"id": "fc54cea9-53af-4fab-abe6-0406820aeda6", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1381750944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "652aaa481e9f48c78070f1b0ffc4cadc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2a233e-26", "ovs_interfaceid": "2e2a233e-2687-4af9-9cde-b3ad26dc9a37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1489.493094] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Acquired lock 
"refresh_cache-1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.493094] env[62525]: DEBUG nova.network.neutron [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Refreshing network info cache for port 2e2a233e-2687-4af9-9cde-b3ad26dc9a37 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1489.493682] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:56:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e2a233e-2687-4af9-9cde-b3ad26dc9a37', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1489.509878] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Creating folder: Project (652aaa481e9f48c78070f1b0ffc4cadc). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1489.515771] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a37d6354-a897-4bdd-9329-87cb8aea03d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.533529] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Created folder: Project (652aaa481e9f48c78070f1b0ffc4cadc) in parent group-v369553. [ 1489.533735] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Creating folder: Instances. Parent ref: group-v369709. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1489.534481] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04695f98-218f-4a3b-a5e7-aab96fbfb141 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.549878] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Created folder: Instances in parent group-v369709. [ 1489.550210] env[62525]: DEBUG oslo.service.loopingcall [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1489.550383] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1489.550969] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f691efb-0d83-49f1-b7a4-15d0dc0d1154 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.578627] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1489.578627] env[62525]: value = "task-1781319" [ 1489.578627] env[62525]: _type = "Task" [ 1489.578627] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.592558] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781319, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.699486] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781314, 'name': CreateSnapshot_Task, 'duration_secs': 0.932426} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.702894] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1489.703773] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad168f0-c325-49a9-8a12-772137953066 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.712922] env[62525]: INFO nova.compute.manager [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Rebuilding instance [ 1489.730517] env[62525]: DEBUG oslo_concurrency.lockutils [None req-819f3d66-8a72-4a8b-bd36-885f3b3613a3 tempest-MultipleCreateTestJSON-201506034 tempest-MultipleCreateTestJSON-201506034-project-member] Lock "10f10329-9a7d-4e1b-8fb4-90350169e518" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.358s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.762269] env[62525]: DEBUG nova.compute.manager [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1489.763301] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c62020f-d921-4aab-b49f-5d6797d84af7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1489.794649] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781316, 'name': ReconfigVM_Task, 'duration_secs': 0.526675} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.794748] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/0067de08-6708-4c7c-a83a-ed9df193d5cd.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1489.799985] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7adfa734-1a75-4e35-a6a4-d4633719488b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.817919] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1489.817919] env[62525]: value = "task-1781320" [ 1489.817919] env[62525]: _type = "Task" [ 1489.817919] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.831277] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781320, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.094021] env[62525]: DEBUG nova.network.neutron [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Updated VIF entry in instance network info cache for port 2e2a233e-2687-4af9-9cde-b3ad26dc9a37. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1490.094021] env[62525]: DEBUG nova.network.neutron [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Updating instance_info_cache with network_info: [{"id": "2e2a233e-2687-4af9-9cde-b3ad26dc9a37", "address": "fa:16:3e:68:56:f3", "network": {"id": "fc54cea9-53af-4fab-abe6-0406820aeda6", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1381750944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "652aaa481e9f48c78070f1b0ffc4cadc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2a233e-26", "ovs_interfaceid": "2e2a233e-2687-4af9-9cde-b3ad26dc9a37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.100492] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781319, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.158193] env[62525]: DEBUG nova.network.neutron [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.210650] env[62525]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df75906-81c7-4dd3-be61-51d6bd4198b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.224026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05169122-b33d-4110-a41c-670bef5a7f0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.266240] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1490.267710] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-65c800f7-089c-47f9-9a38-294829f0f990 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.271475] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a8cc9e-2f70-40ab-8381-96bb2e3396b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.278512] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1490.282114] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-117bbc0e-38dc-4c8c-8401-12f37c7c22f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.286321] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e22f29-b7e8-4ec7-beb0-487d172f8d92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.290419] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1490.290419] env[62525]: value = "task-1781321" [ 1490.290419] env[62525]: _type = "Task" [ 1490.290419] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.304401] env[62525]: DEBUG nova.compute.provider_tree [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1490.309937] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1490.309937] env[62525]: value = "task-1781322" [ 1490.309937] env[62525]: _type = "Task" [ 1490.309937] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.321496] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781321, 'name': CloneVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.330625] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.337563] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781320, 'name': Rename_Task, 'duration_secs': 0.235546} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.337563] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1490.337808] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ff9bc3b-72c7-4a61-bf9f-2f93ba9ecb70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.346531] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1490.346531] env[62525]: value = "task-1781323" [ 1490.346531] env[62525]: _type = "Task" [ 1490.346531] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.357445] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781323, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.594271] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781319, 'name': CreateVM_Task, 'duration_secs': 0.52098} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.594546] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1490.595840] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.597029] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.597029] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1490.597029] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8cc994a-5b6a-4682-a1c6-aa0b87eb90a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.598932] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Releasing lock "refresh_cache-1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.599193] env[62525]: DEBUG nova.compute.manager [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Received event network-changed-1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1490.599421] env[62525]: DEBUG nova.compute.manager [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Refreshing instance network info cache due to event network-changed-1ac84b8a-0ab3-4332-910f-4710a6864b79. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1490.599661] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Acquiring lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.599859] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Acquired lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.600068] env[62525]: DEBUG nova.network.neutron [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Refreshing network info cache for port 1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.607369] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1490.607369] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cf9254-2d0f-5cc4-1af1-62b31d9a745b" [ 1490.607369] env[62525]: _type = "Task" [ 1490.607369] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.620502] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cf9254-2d0f-5cc4-1af1-62b31d9a745b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.660906] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.663546] env[62525]: DEBUG nova.compute.manager [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1490.667019] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad8022e-5696-4ac9-bd69-bf4d0e078bea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.804962] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781321, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.812443] env[62525]: DEBUG nova.scheduler.client.report [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1490.830769] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781322, 'name': PowerOffVM_Task, 'duration_secs': 0.475815} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.835237] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1490.835237] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1490.835237] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def8da05-ef2f-49bb-bf54-17d54803d906 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.852289] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1490.852289] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dba8ad8a-1608-4bff-84ab-2449253cedf8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.862292] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781323, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.058100] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1491.058504] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1491.058810] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleting the datastore file [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1491.059265] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16b78690-8adc-47e6-a37f-7efcbef8bc2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.072764] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1491.072764] env[62525]: value = "task-1781326" [ 1491.072764] env[62525]: _type = "Task" [ 1491.072764] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.083584] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.121369] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cf9254-2d0f-5cc4-1af1-62b31d9a745b, 'name': SearchDatastore_Task, 'duration_secs': 0.020573} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.122103] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.122103] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1491.122406] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.122598] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.122824] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1491.123185] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab0777b1-0c81-45b5-a278-d1ef0d6478cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.136845] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1491.136845] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1491.136845] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-025009c9-209a-4669-8ab8-2f1712b7096d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.148578] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1491.148578] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526731e1-6d64-3820-2483-d6552f2b06a5" [ 1491.148578] env[62525]: _type = "Task" [ 1491.148578] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.162202] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526731e1-6d64-3820-2483-d6552f2b06a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.304897] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781321, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.321991] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.324274] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.091s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.328560] env[62525]: DEBUG nova.objects.instance [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lazy-loading 'resources' on Instance uuid deef59c8-f710-434d-bddc-f63bb3d518b1 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1491.354208] env[62525]: INFO nova.scheduler.client.report [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Deleted allocations for instance d2e7c558-02af-477c-b996-239ef14ed75b [ 1491.367995] env[62525]: DEBUG oslo_vmware.api [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781323, 'name': PowerOnVM_Task, 'duration_secs': 0.891052} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.368282] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1491.368518] env[62525]: INFO nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Took 9.25 seconds to spawn the instance on the hypervisor. [ 1491.368648] env[62525]: DEBUG nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1491.371099] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69abc2e5-155d-4123-a808-de55393a8c61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.408741] env[62525]: DEBUG nova.network.neutron [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updated VIF entry in instance network info cache for port 1ac84b8a-0ab3-4332-910f-4710a6864b79. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.408741] env[62525]: DEBUG nova.network.neutron [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updating instance_info_cache with network_info: [{"id": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "address": "fa:16:3e:a6:af:d9", "network": {"id": "bb870dec-79cf-4481-89a8-329b92ae55af", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2073315772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3263280a4a14e87ac174d07c5dcb443", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "537e0890-4fa2-4f2d-b74c-49933a4edf53", "external-id": "nsx-vlan-transportzone-82", "segmentation_id": 82, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac84b8a-0a", "ovs_interfaceid": "1ac84b8a-0ab3-4332-910f-4710a6864b79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.583891] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 
tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.367092} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.584303] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1491.584494] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1491.584670] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1491.665265] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526731e1-6d64-3820-2483-d6552f2b06a5, 'name': SearchDatastore_Task, 'duration_secs': 0.017805} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.667582] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e0ba4e8-f894-4f55-a2c8-fd3faff39981 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.677254] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1491.677254] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5256bf74-d9f8-927c-d9d9-ad685172e526" [ 1491.677254] env[62525]: _type = "Task" [ 1491.677254] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.690940] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318bc65f-624b-4e26-b530-4a563f253f79 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.694975] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5256bf74-d9f8-927c-d9d9-ad685172e526, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.702284] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Doing hard reboot of VM {{(pid=62525) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1491.703507] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-11fe666d-ffb8-4c23-9dae-029c5145f2aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.716801] env[62525]: DEBUG oslo_vmware.api [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1491.716801] env[62525]: value = "task-1781327" [ 1491.716801] env[62525]: _type = "Task" [ 1491.716801] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.730378] env[62525]: DEBUG oslo_vmware.api [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781327, 'name': ResetVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.803848] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "24d38b8e-c48b-4562-817e-7ae57658fb1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.804920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "24d38b8e-c48b-4562-817e-7ae57658fb1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.810563] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781321, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.868409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c69d5d54-1b98-4612-8ae8-253aeb2ae0d3 tempest-AttachInterfacesUnderV243Test-389156287 tempest-AttachInterfacesUnderV243Test-389156287-project-member] Lock "d2e7c558-02af-477c-b996-239ef14ed75b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.152s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.895670] env[62525]: INFO nova.compute.manager [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Took 36.54 seconds to build instance. [ 1491.912200] env[62525]: DEBUG oslo_concurrency.lockutils [req-731ffc92-ce70-4de9-ae0c-838fbdc2692a req-5146848c-e25d-4876-a019-ce7c8c38d6ea service nova] Releasing lock "refresh_cache-94560d78-071c-419d-ad10-f42a5b2271a8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.192226] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5256bf74-d9f8-927c-d9d9-ad685172e526, 'name': SearchDatastore_Task, 'duration_secs': 0.018619} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.192518] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.192785] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6/1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1492.199288] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d70cbac2-cbde-4eb1-afaa-c30a56c0c144 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.207486] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1492.207486] env[62525]: value = "task-1781328" [ 1492.207486] env[62525]: _type = "Task" [ 1492.207486] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.225130] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781328, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.231802] env[62525]: DEBUG oslo_vmware.api [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781327, 'name': ResetVM_Task, 'duration_secs': 0.114639} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.233252] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Did hard reboot of VM {{(pid=62525) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1492.233252] env[62525]: DEBUG nova.compute.manager [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.235377] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f697e936-4e31-414a-a9b6-a021552470ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.288023] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a840bff-cec2-47ce-9548-6af09e614ea2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.304724] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5862aa5-19e2-40a3-a1b1-1ab80bf7ef9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.314179] env[62525]: DEBUG nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1492.316756] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781321, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.352314] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46704ab-9722-49fa-b3d3-677da1d4598a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.361725] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be9f8ca-b112-4349-a7c8-4dc82dc7493d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.377928] env[62525]: DEBUG nova.compute.provider_tree [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1492.401736] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29b7fb28-9fb0-4016-8386-ad31a840ae1d tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.873s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.631978] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1492.633110] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1492.633110] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1492.633110] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1492.633110] env[62525]: DEBUG 
nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1492.633110] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1492.633477] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1492.633477] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1492.633608] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1492.633712] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1492.633886] env[62525]: DEBUG nova.virt.hardware [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1492.634986] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc58534-a923-462e-a1f0-18d3a8a59e94 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.647619] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec58d74c-6de6-4342-ad2e-6f1173c982d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.665346] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:64:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44ff1acd-1593-43a1-95fd-aceba913d7d5', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1492.674929] env[62525]: DEBUG oslo.service.loopingcall [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1492.675769] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1492.675912] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1239f0f-2c38-4d7a-a47f-43d867fee250 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.702818] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1492.702818] env[62525]: value = "task-1781329" [ 1492.702818] env[62525]: _type = "Task" [ 1492.702818] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.719689] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781329, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.723861] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781328, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.753116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8546aa9-6609-4df6-b738-2a7e75b44985 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.204s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.811545] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781321, 'name': CloneVM_Task, 'duration_secs': 2.141532} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.811545] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Created linked-clone VM from snapshot [ 1492.811545] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf38749-e93d-4bc9-bc5a-3986d7912dad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.832023] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Uploading image 37bead91-bffe-4b1f-8cdb-a35c64fc8f45 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1492.855731] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1492.855731] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-76a9632f-df25-4345-875b-f9045c7de2da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.864240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.865963] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1492.865963] env[62525]: value = "task-1781330" [ 1492.865963] env[62525]: _type = "Task" [ 1492.865963] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.879680] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781330, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.884986] env[62525]: DEBUG nova.scheduler.client.report [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1493.215844] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781329, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.224614] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79258} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.225074] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6/1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1493.225653] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1493.226091] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7334d01c-7c01-4991-ac8d-7e7cc80f11a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.236521] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1493.236521] env[62525]: value = "task-1781332" [ 1493.236521] env[62525]: _type = "Task" [ 1493.236521] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.253643] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781332, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.378704] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781330, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.391497] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.067s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.396060] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.248s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.396811] env[62525]: DEBUG nova.objects.instance [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lazy-loading 'resources' on Instance uuid 1f3792c0-9f86-4d76-a1a6-28d492869046 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.421031] env[62525]: INFO nova.scheduler.client.report [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Deleted allocations for instance deef59c8-f710-434d-bddc-f63bb3d518b1 [ 1493.450789] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52755afb-d4b7-107e-d0a6-b55c119d1537/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1493.451802] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd186883-4ba4-46f1-b61d-8e569d265e5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.459194] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52755afb-d4b7-107e-d0a6-b55c119d1537/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1493.459382] env[62525]: ERROR oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52755afb-d4b7-107e-d0a6-b55c119d1537/disk-0.vmdk due to incomplete transfer. 
[ 1493.459602] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-70013934-86d2-4c8c-b569-23b768fadcdf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.468267] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52755afb-d4b7-107e-d0a6-b55c119d1537/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1493.468470] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Uploaded image 07fe9f47-d2af-46d3-8aa3-aba041f431cd to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1493.470947] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1493.471382] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2ade5598-56a7-413f-b69a-b31528e89379 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.481204] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1493.481204] env[62525]: value = "task-1781333" [ 1493.481204] env[62525]: _type = "Task" [ 1493.481204] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.492120] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781333, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.716628] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781329, 'name': CreateVM_Task, 'duration_secs': 0.589554} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.716628] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1493.717055] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.717220] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.717534] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1493.717832] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bff339bf-1295-405c-8665-00dc7e1f38fb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.725032] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1493.725032] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52805f21-8ba4-c3ad-c50a-4a6b7f9615b0" [ 1493.725032] env[62525]: _type = "Task" [ 1493.725032] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.732936] env[62525]: DEBUG nova.compute.manager [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Received event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.733421] env[62525]: DEBUG nova.compute.manager [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing instance network info cache due to event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1493.733539] env[62525]: DEBUG oslo_concurrency.lockutils [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] Acquiring lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.733704] env[62525]: DEBUG oslo_concurrency.lockutils [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] Acquired lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.733853] env[62525]: DEBUG nova.network.neutron [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.744482] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52805f21-8ba4-c3ad-c50a-4a6b7f9615b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.759414] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781332, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148913} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.759493] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1493.760305] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8963741-e070-4033-95cc-065a2c731a63 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.796016] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6/1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1493.796016] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abd1a89e-1f16-4c09-ac94-8dd672776ee9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.819541] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1493.819541] env[62525]: value = "task-1781334" [ 1493.819541] env[62525]: _type = "Task" [ 1493.819541] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.830228] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781334, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.880159] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781330, 'name': Destroy_Task, 'duration_secs': 0.719451} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.880447] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Destroyed the VM [ 1493.880687] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1493.880929] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f7bbcefe-bd19-41ca-a06b-932ae1f97fb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.889251] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1493.889251] env[62525]: value = "task-1781335" [ 1493.889251] env[62525]: _type = "Task" [ 1493.889251] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.897757] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781335, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.932737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-03dcaf3b-0501-4f43-9f12-2a814b58a3a9 tempest-ServersTestFqdnHostnames-1171532093 tempest-ServersTestFqdnHostnames-1171532093-project-member] Lock "deef59c8-f710-434d-bddc-f63bb3d518b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.300s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.995879] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781333, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.237385] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52805f21-8ba4-c3ad-c50a-4a6b7f9615b0, 'name': SearchDatastore_Task, 'duration_secs': 0.011538} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.237714] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.238038] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.238563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.238563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.238707] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.238884] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e2fba2a-45eb-4146-b6a2-394d007d8af9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.273378] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53c0692-fb1d-49b2-9cbe-015d17430581 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.278180] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.278180] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1494.278687] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7518969f-a6bc-48c7-bfd2-c627393977c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.285838] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2891ce37-74aa-4757-bc46-3c2319ccf365 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.291859] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1494.291859] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fd55ee-10ef-35a1-e9ef-43d4b0f44c4a" [ 1494.291859] env[62525]: _type = "Task" [ 1494.291859] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.344633] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df5f5bd-e07e-4b1d-8977-58e5854ba5d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.352195] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd55ee-10ef-35a1-e9ef-43d4b0f44c4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.361990] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a9dd8e-87e1-43b4-8042-cf1969583fe6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.367553] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781334, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.381209] env[62525]: DEBUG nova.compute.provider_tree [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1494.401096] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781335, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.544553] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781333, 'name': Destroy_Task, 'duration_secs': 0.676159} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.544553] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Destroyed the VM [ 1494.544553] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1494.544553] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3890857e-e3ed-4875-8772-9a9733c5ab9a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.544553] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1494.544553] env[62525]: value = "task-1781336" [ 1494.544553] env[62525]: _type = "Task" [ 1494.544553] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.544553] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781336, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.801464] env[62525]: DEBUG nova.network.neutron [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updated VIF entry in instance network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.805045] env[62525]: DEBUG nova.network.neutron [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.811938] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd55ee-10ef-35a1-e9ef-43d4b0f44c4a, 'name': SearchDatastore_Task, 'duration_secs': 0.219019} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.813246] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c3f7667-5c2b-42b6-87df-917d6ab327d5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.824348] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1494.824348] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cc97ae-3068-d57e-b3de-5a906e315c1f" [ 1494.824348] env[62525]: _type = "Task" [ 1494.824348] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.842446] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cc97ae-3068-d57e-b3de-5a906e315c1f, 'name': SearchDatastore_Task, 'duration_secs': 0.013807} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.843278] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.844029] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1494.844029] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51906420-505d-45c6-a413-b6ca905b11e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.850081] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781334, 'name': ReconfigVM_Task, 'duration_secs': 0.98347} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.851469] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6/1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1494.851920] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c91af17-2c9d-4040-ae39-f599a2258955 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.857369] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1494.857369] env[62525]: value = "task-1781337" [ 1494.857369] env[62525]: _type = "Task" [ 1494.857369] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.863071] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1494.863071] env[62525]: value = "task-1781338" [ 1494.863071] env[62525]: _type = "Task" [ 1494.863071] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.868985] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781337, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.883014] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781338, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.900563] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781335, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.916093] env[62525]: ERROR nova.scheduler.client.report [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] [req-2b1dc839-488c-4490-8ac2-3ad75c89446d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2b1dc839-488c-4490-8ac2-3ad75c89446d"}]} [ 1494.935040] env[62525]: DEBUG nova.scheduler.client.report [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1494.956560] env[62525]: DEBUG nova.scheduler.client.report [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1494.956560] env[62525]: DEBUG nova.compute.provider_tree [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1494.970607] env[62525]: DEBUG nova.scheduler.client.report [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1494.993664] env[62525]: DEBUG nova.scheduler.client.report [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1495.025018] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781336, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.306744] env[62525]: DEBUG oslo_concurrency.lockutils [req-dfeab3cb-1626-4c7c-8b60-b32e6484354a req-f314d6ca-7415-4872-a03d-73757c2816bd service nova] Releasing lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.382424] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781338, 'name': Rename_Task, 'duration_secs': 0.16571} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.388578] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1495.389222] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781337, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514463} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.394056] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c879ed4-9485-49a5-b810-e007d2c4e1e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.397019] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1495.397019] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1495.397019] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84055ec2-9ddf-4840-b9cb-ce5e6a9defe2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.412142] env[62525]: DEBUG oslo_vmware.api [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781335, 'name': RemoveSnapshot_Task, 'duration_secs': 1.506636} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.420291] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1495.423870] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1495.423870] env[62525]: value = "task-1781340" [ 1495.423870] env[62525]: _type = "Task" [ 1495.423870] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.423870] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1495.423870] env[62525]: value = "task-1781339" [ 1495.423870] env[62525]: _type = "Task" [ 1495.423870] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.442547] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781339, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.449025] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.492173] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57390d2-b7be-4112-ad34-970473babe35 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.502999] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7804109b-70e9-409b-a6be-24f37d6e5d86 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.537533] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711ec46a-e19a-43a7-9333-7d0bba925d08 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.549341] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3522b522-a272-41f1-ba1c-d477869af286 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.553085] env[62525]: DEBUG oslo_vmware.api [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781336, 'name': RemoveSnapshot_Task, 'duration_secs': 0.904633} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.553366] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1495.553585] env[62525]: INFO nova.compute.manager [None req-ca31184c-4f4c-41cd-8011-d1ca2632bbc8 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Took 17.66 seconds to snapshot the instance on the hypervisor. [ 1495.567798] env[62525]: DEBUG nova.compute.provider_tree [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.928937] env[62525]: WARNING nova.compute.manager [None req-f714ba1b-7909-480a-8bf0-2ecb8741c7a8 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Image not found during snapshot: nova.exception.ImageNotFound: Image 37bead91-bffe-4b1f-8cdb-a35c64fc8f45 could not be found. [ 1495.958564] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104393} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.958835] env[62525]: DEBUG oslo_vmware.api [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781339, 'name': PowerOnVM_Task, 'duration_secs': 0.490309} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.959741] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1495.960103] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1495.960321] env[62525]: INFO nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Took 9.01 seconds to spawn the instance on the hypervisor. 
[ 1495.960496] env[62525]: DEBUG nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1495.961285] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7b2724-138e-4115-8872-715f979290f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.965721] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bef890-c6b5-4007-b394-dc56d5dd0c80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.992553] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1495.994348] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c225a30-9456-46cb-be0c-11baa66eb02c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.019533] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1496.019533] env[62525]: value = "task-1781341" [ 1496.019533] env[62525]: _type = "Task" [ 1496.019533] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.031390] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781341, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.071944] env[62525]: DEBUG nova.scheduler.client.report [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1496.525056] env[62525]: INFO nova.compute.manager [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Took 38.90 seconds to build instance. 
[ 1496.532978] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.580023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.182s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.582201] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.500s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.585325] env[62525]: INFO nova.compute.claims [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1496.609807] env[62525]: INFO nova.scheduler.client.report [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Deleted allocations for instance 1f3792c0-9f86-4d76-a1a6-28d492869046 [ 1497.027166] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2a238d6d-5adb-4ffc-b2ec-c282a6c029d4 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.005s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.037678] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781341, 'name': ReconfigVM_Task, 'duration_secs': 0.908923} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.041505] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226/56cb0d0c-a7dd-4158-8bed-ddff050e0226.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1497.041505] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce834ec3-c9f0-43da-bade-f41a6ef11efe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.049507] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1497.049507] env[62525]: value = "task-1781342" [ 1497.049507] env[62525]: _type = "Task" [ 1497.049507] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.059449] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781342, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.126375] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bcdda9-08ca-42a4-a1de-c69b97aa1fbb tempest-ServersTestJSON-1559293503 tempest-ServersTestJSON-1559293503-project-member] Lock "1f3792c0-9f86-4d76-a1a6-28d492869046" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.142s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.273579] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Acquiring lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.273579] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.560728] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781342, 'name': Rename_Task, 'duration_secs': 0.297243} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.561114] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1497.561431] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7886da1-c1ba-41bb-a903-a9b08db0791e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.573324] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1497.573324] env[62525]: value = "task-1781343" [ 1497.573324] env[62525]: _type = "Task" [ 1497.573324] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.585716] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.590582] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "70313696-a9cc-499c-b9e6-329a71c4b915" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.590819] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "70313696-a9cc-499c-b9e6-329a71c4b915" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.591028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "70313696-a9cc-499c-b9e6-329a71c4b915-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.591235] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "70313696-a9cc-499c-b9e6-329a71c4b915-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.591402] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "70313696-a9cc-499c-b9e6-329a71c4b915-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.593414] env[62525]: INFO nova.compute.manager [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Terminating instance [ 1497.598712] env[62525]: DEBUG nova.compute.manager [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1497.598876] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1497.599988] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9285a8b-be28-4b7e-8a6c-82cdf8089294 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.613825] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.613825] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6554f6e3-e7e5-4ecc-a5be-02768f258524 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.627146] env[62525]: DEBUG oslo_vmware.api [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1497.627146] env[62525]: value = "task-1781344" [ 1497.627146] env[62525]: _type = "Task" [ 1497.627146] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.640370] env[62525]: DEBUG oslo_vmware.api [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781344, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.774068] env[62525]: DEBUG nova.compute.manager [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1497.774330] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b67bda-d963-4d6d-9ae6-0a8fd60e4e39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.777442] env[62525]: DEBUG nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1498.076244] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.080135] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.080458] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.080767] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.080997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.090490] env[62525]: INFO nova.compute.manager [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 
tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Terminating instance [ 1498.090490] env[62525]: DEBUG nova.compute.manager [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1498.090490] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1498.091088] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016baf7f-79b1-4d27-b63b-e6b8a7248950 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.097885] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781343, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.100069] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ee4c1d-61b0-4afd-8905-6ca049354784 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.104844] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1498.105485] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a2ffdee-9752-41e8-afef-00970b76911d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.109942] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9983903-ef46-4462-ad92-b6b5a05edcc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.116811] env[62525]: DEBUG oslo_vmware.api [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1498.116811] env[62525]: value = "task-1781345" [ 1498.116811] env[62525]: _type = "Task" [ 1498.116811] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.156504] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7d581f-74c7-4b7f-bcc8-a0023900e8b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.162924] env[62525]: DEBUG oslo_vmware.api [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781345, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.172861] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042990c2-141b-487a-b7f7-7a192eba75cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.179031] env[62525]: DEBUG oslo_vmware.api [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781344, 'name': PowerOffVM_Task, 'duration_secs': 0.431674} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.179224] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.179481] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1498.180132] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24238145-542d-4925-8e8d-32925af7feba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.191940] env[62525]: DEBUG nova.compute.provider_tree [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.292177] env[62525]: INFO nova.compute.manager [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] instance snapshotting [ 1498.295211] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e30194-65ed-482f-bac5-ee3c3f44092d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.299041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 
tempest-ServerActionsV293TestJSON-811188686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.316164] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f3c442-aec0-42cf-a810-43c801895fdc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.435090] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.435479] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.435479] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleting the datastore file [datastore1] 70313696-a9cc-499c-b9e6-329a71c4b915 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1498.435844] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56abe642-9718-4212-bcfd-992feba45262 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.450222] env[62525]: DEBUG oslo_vmware.api [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1498.450222] env[62525]: value = "task-1781347" [ 1498.450222] env[62525]: _type = "Task" [ 1498.450222] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.464411] env[62525]: DEBUG oslo_vmware.api [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781347, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.594674] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781343, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.630504] env[62525]: DEBUG oslo_vmware.api [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781345, 'name': PowerOffVM_Task, 'duration_secs': 0.406082} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.630872] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.630980] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1498.631230] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c0fc44c-94b2-42cf-a4ce-1bd328a80779 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.695764] env[62525]: DEBUG nova.scheduler.client.report [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1498.751286] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.751517] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.751699] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Deleting the datastore file [datastore1] 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1498.752012] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c46945cb-9f7f-4f0d-89ba-6307793857c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.761359] env[62525]: DEBUG oslo_vmware.api [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for the task: (returnval){ [ 1498.761359] env[62525]: value = "task-1781349" [ 1498.761359] env[62525]: _type = "Task" [ 1498.761359] env[62525]: } to 
complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.772486] env[62525]: DEBUG oslo_vmware.api [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781349, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.832019] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1498.832019] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-17061a6e-0859-4269-82de-1e2e0951f17c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.840548] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1498.840548] env[62525]: value = "task-1781350" [ 1498.840548] env[62525]: _type = "Task" [ 1498.840548] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.860919] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781350, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.961913] env[62525]: DEBUG oslo_vmware.api [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19501} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.966024] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.966024] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1498.966024] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1498.966024] env[62525]: INFO nova.compute.manager [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Took 1.36 seconds to destroy the instance on the hypervisor. [ 1498.966024] env[62525]: DEBUG oslo.service.loopingcall [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.966024] env[62525]: DEBUG nova.compute.manager [-] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1498.966024] env[62525]: DEBUG nova.network.neutron [-] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.096376] env[62525]: DEBUG oslo_vmware.api [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781343, 'name': PowerOnVM_Task, 'duration_secs': 1.131333} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.096611] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1499.096731] env[62525]: DEBUG nova.compute.manager [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1499.097541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adde87b-5cfb-4be0-8e31-5555a3719ed4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.201979] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.202680] env[62525]: DEBUG nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1499.205862] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.479s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.205981] env[62525]: DEBUG nova.objects.instance [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lazy-loading 'resources' on Instance uuid f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1499.273874] env[62525]: DEBUG oslo_vmware.api [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781349, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.353266] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781350, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.611055] env[62525]: DEBUG nova.compute.manager [req-e4702d23-5d1c-4b0f-b55c-ecaa323ffc0d req-5bcde150-621b-470f-94d6-920d77231b52 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Received event network-vif-deleted-626c2e84-d2dc-4c5b-81b4-0a937a7591c9 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1499.611248] env[62525]: INFO nova.compute.manager [req-e4702d23-5d1c-4b0f-b55c-ecaa323ffc0d req-5bcde150-621b-470f-94d6-920d77231b52 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Neutron deleted interface 626c2e84-d2dc-4c5b-81b4-0a937a7591c9; detaching it from the instance and deleting it from the info cache [ 1499.611449] env[62525]: DEBUG nova.network.neutron [req-e4702d23-5d1c-4b0f-b55c-ecaa323ffc0d req-5bcde150-621b-470f-94d6-920d77231b52 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.624987] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.710491] env[62525]: DEBUG nova.compute.utils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1499.717933] env[62525]: DEBUG nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1499.718249] env[62525]: DEBUG nova.network.neutron [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1499.775841] env[62525]: DEBUG oslo_vmware.api [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Task: {'id': task-1781349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.575312} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.781525] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.782064] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.782508] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.782906] env[62525]: INFO nova.compute.manager [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1499.783410] env[62525]: DEBUG oslo.service.loopingcall [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.784174] env[62525]: DEBUG nova.compute.manager [-] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1499.784886] env[62525]: DEBUG nova.network.neutron [-] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.832529] env[62525]: DEBUG nova.policy [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c0deb1ab43142f29a15397a2e23d048', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '105f108590e14c649fff545b5b96f4fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1499.856035] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781350, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.947657] env[62525]: DEBUG nova.network.neutron [-] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.125044] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f51f0f6a-97df-414e-8287-daa0e1d5fcf0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.140091] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d0b25f-e04e-4654-8fd2-66b81f1af3cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.182492] env[62525]: DEBUG nova.compute.manager [req-e4702d23-5d1c-4b0f-b55c-ecaa323ffc0d req-5bcde150-621b-470f-94d6-920d77231b52 service nova] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Detach interface failed, port_id=626c2e84-d2dc-4c5b-81b4-0a937a7591c9, reason: Instance 70313696-a9cc-499c-b9e6-329a71c4b915 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1500.213465] env[62525]: DEBUG nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1500.228149] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84a5401-fe00-4911-9ba9-cdff5c2ca98a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.236819] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b30484-7552-420f-a66c-82aca2ab1169 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.306412] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cdb37d-56d2-4cd6-b5de-2ae4e1776b08 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.319907] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5382e130-c40d-409d-a938-5fad4b2ece40 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.348279] env[62525]: DEBUG nova.compute.provider_tree [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1500.366218] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781350, 'name': CreateSnapshot_Task, 'duration_secs': 1.210206} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.366625] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1500.367872] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e82a3a-63ca-426d-b509-94da5b0e1ce2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.455229] env[62525]: INFO nova.compute.manager [-] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Took 1.49 seconds to deallocate network for instance. [ 1500.853577] env[62525]: DEBUG nova.scheduler.client.report [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1500.894480] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1500.895173] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9198ab31-146e-4678-981a-3102d60970b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.905282] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1500.905282] env[62525]: value = "task-1781351" [ 1500.905282] env[62525]: _type = "Task" [ 1500.905282] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.919472] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781351, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.962809] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.989308] env[62525]: DEBUG nova.network.neutron [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Successfully created port: 8fd85844-87de-4df5-a881-9ed796e8af51 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1500.991198] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "61f05e69-5e90-47da-9f47-3651b580a23c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.991438] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "61f05e69-5e90-47da-9f47-3651b580a23c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.991639] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "61f05e69-5e90-47da-9f47-3651b580a23c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.992073] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "61f05e69-5e90-47da-9f47-3651b580a23c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.992073] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "61f05e69-5e90-47da-9f47-3651b580a23c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.994764] env[62525]: INFO nova.compute.manager [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Terminating instance [ 1500.996880] env[62525]: DEBUG nova.compute.manager [None 
req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1500.997113] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1500.998055] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682cdc4d-6f6f-4451-a9fa-5312d723a2dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.006841] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1501.007305] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23531664-c134-4cd9-96f2-46b1c922e043 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.018653] env[62525]: DEBUG oslo_vmware.api [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1501.018653] env[62525]: value = "task-1781352" [ 1501.018653] env[62525]: _type = "Task" [ 1501.018653] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.040542] env[62525]: DEBUG oslo_vmware.api [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781352, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.227888] env[62525]: DEBUG nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1501.258713] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1501.258713] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1501.258713] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.258713] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1501.259235] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.259303] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1501.259515] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1501.259673] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1501.259837] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 
tempest-ImagesTestJSON-1792439270-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1501.259997] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1501.261078] env[62525]: DEBUG nova.virt.hardware [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1501.262489] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91e7e4f-27a3-4435-aee0-0ad082ae41ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.275135] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c181e2-c2c3-4582-894c-e5ff9e634972 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.329178] env[62525]: DEBUG nova.network.neutron [-] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.358358] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.361706] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.167s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.363975] env[62525]: INFO nova.compute.claims [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1501.399512] env[62525]: INFO nova.scheduler.client.report [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted allocations for instance f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b [ 1501.424241] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781351, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.539314] env[62525]: DEBUG oslo_vmware.api [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781352, 'name': PowerOffVM_Task, 'duration_secs': 0.222543} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.539429] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1501.539562] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1501.539822] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8800300f-f56d-49c6-a6f2-59dab0a3fd9d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.633681] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1501.633892] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1501.634082] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleting the datastore file [datastore1] 61f05e69-5e90-47da-9f47-3651b580a23c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1501.634346] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e49cb57-2a40-42cd-9b74-84d7ab53c587 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.644227] env[62525]: DEBUG oslo_vmware.api [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1501.644227] env[62525]: value = "task-1781354" [ 1501.644227] env[62525]: _type = "Task" [ 1501.644227] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.661812] env[62525]: DEBUG oslo_vmware.api [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781354, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.729946] env[62525]: DEBUG nova.compute.manager [req-6065245f-fa29-44ca-86d3-18e04148e6c2 req-ef97d845-a58d-40f8-b398-65b5ef849a70 service nova] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Received event network-vif-deleted-2e2a233e-2687-4af9-9cde-b3ad26dc9a37 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1501.831244] env[62525]: INFO nova.compute.manager [-] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Took 2.05 seconds to deallocate network for instance. [ 1501.920374] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1902d162-d039-475d-892f-c8f0ec4f21a0 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.864s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.922503] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781351, 'name': CloneVM_Task} progress is 95%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.158872] env[62525]: DEBUG oslo_vmware.api [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139103} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.159613] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1502.159823] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1502.160030] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1502.160284] env[62525]: INFO nova.compute.manager [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1502.160568] env[62525]: DEBUG oslo.service.loopingcall [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1502.160797] env[62525]: DEBUG nova.compute.manager [-] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1502.160953] env[62525]: DEBUG nova.network.neutron [-] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1502.344984] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.422779] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781351, 'name': CloneVM_Task, 'duration_secs': 1.322008} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.423494] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Created linked-clone VM from snapshot [ 1502.425615] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cb4ca7-9217-4ddc-86ce-0f07c8dfe9ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.447129] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Uploading image bd6d630f-1173-4f94-9676-057d6f46f208 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1502.478645] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1502.478645] env[62525]: value = "vm-369716" [ 1502.478645] env[62525]: _type = "VirtualMachine" [ 1502.478645] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1502.479472] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-347bcb3d-8291-41d7-9276-64a9b3f11625 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.490153] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lease: (returnval){ [ 1502.490153] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ca4e58-2923-1af7-ae06-6f8dbc172478" [ 1502.490153] env[62525]: _type = "HttpNfcLease" [ 1502.490153] env[62525]: } obtained for exporting VM: (result){ [ 1502.490153] env[62525]: value = "vm-369716" [ 1502.490153] env[62525]: _type = "VirtualMachine" [ 1502.490153] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1502.490471] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the lease: (returnval){ [ 1502.490471] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ca4e58-2923-1af7-ae06-6f8dbc172478" [ 1502.490471] env[62525]: _type = "HttpNfcLease" [ 1502.490471] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1502.504672] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1502.504672] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ca4e58-2923-1af7-ae06-6f8dbc172478" [ 1502.504672] env[62525]: _type = "HttpNfcLease" [ 1502.504672] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1502.874515] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba5e427-b0c0-4d33-9573-7071568f98e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.886179] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c403153-44ed-4406-a54a-8ed88d9c510c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.936904] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae8b9f8-9efc-4dc5-907a-380ae51679f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.947160] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1425d165-8352-4e7b-bdb2-c7e49c8eaf62 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.964084] env[62525]: DEBUG nova.compute.provider_tree [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.004127] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1503.004127] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ca4e58-2923-1af7-ae06-6f8dbc172478" [ 1503.004127] env[62525]: _type = "HttpNfcLease" [ 1503.004127] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1503.004127] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1503.004127] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ca4e58-2923-1af7-ae06-6f8dbc172478" [ 1503.004127] env[62525]: _type = "HttpNfcLease" [ 1503.004127] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1503.004127] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6769fac6-bd64-4cc3-a382-2998fca266ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.009778] env[62525]: DEBUG nova.network.neutron [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Successfully updated port: 8fd85844-87de-4df5-a881-9ed796e8af51 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1503.019271] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b25330-6662-5cca-0f8e-b044226544c2/disk-0.vmdk from lease info. 
{{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1503.019649] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b25330-6662-5cca-0f8e-b044226544c2/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1503.156476] env[62525]: DEBUG nova.network.neutron [-] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.176819] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "0a7ef997-bda5-452e-abe0-537146bf23f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.176819] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.197923] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6d79e159-f6b0-4671-98e9-5b1820495f33 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.467711] env[62525]: DEBUG nova.scheduler.client.report [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1503.517031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "refresh_cache-cafae62e-b001-4ee0-8e89-4da9c60cf488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.517031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "refresh_cache-cafae62e-b001-4ee0-8e89-4da9c60cf488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1503.517031] env[62525]: DEBUG nova.network.neutron [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1503.661543] env[62525]: INFO nova.compute.manager [-] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Took 1.50 seconds to deallocate network for instance. [ 1503.681502] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.682755] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.685794] env[62525]: DEBUG nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1503.835780] env[62525]: DEBUG nova.compute.manager [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Received event network-vif-deleted-b3bb2d5b-835d-4462-8234-ea61148680b4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1503.836008] env[62525]: DEBUG nova.compute.manager [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Received event network-vif-plugged-8fd85844-87de-4df5-a881-9ed796e8af51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1503.836231] env[62525]: DEBUG oslo_concurrency.lockutils [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] Acquiring lock "cafae62e-b001-4ee0-8e89-4da9c60cf488-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.837159] env[62525]: DEBUG oslo_concurrency.lockutils [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] Lock "cafae62e-b001-4ee0-8e89-4da9c60cf488-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.837159] env[62525]: DEBUG oslo_concurrency.lockutils [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] Lock 
"cafae62e-b001-4ee0-8e89-4da9c60cf488-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.837446] env[62525]: DEBUG nova.compute.manager [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] No waiting events found dispatching network-vif-plugged-8fd85844-87de-4df5-a881-9ed796e8af51 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1503.837695] env[62525]: WARNING nova.compute.manager [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Received unexpected event network-vif-plugged-8fd85844-87de-4df5-a881-9ed796e8af51 for instance with vm_state building and task_state spawning. [ 1503.837966] env[62525]: DEBUG nova.compute.manager [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Received event network-changed-8fd85844-87de-4df5-a881-9ed796e8af51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1503.838193] env[62525]: DEBUG nova.compute.manager [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Refreshing instance network info cache due to event network-changed-8fd85844-87de-4df5-a881-9ed796e8af51. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1503.838493] env[62525]: DEBUG oslo_concurrency.lockutils [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] Acquiring lock "refresh_cache-cafae62e-b001-4ee0-8e89-4da9c60cf488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.973760] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.974146] env[62525]: DEBUG nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1503.980495] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 31.290s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.980786] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.981033] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1503.981406] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.676s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.981794] env[62525]: DEBUG nova.objects.instance [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lazy-loading 'resources' on Instance uuid 7c8474fd-2ca5-4ecc-b2e6-4248baafd639 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1503.985841] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adcd927-8d5a-4380-8497-22af773abaef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.998735] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36f65af-4e08-49c1-aae5-dace1f8d5723 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.017797] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84085512-f173-45bd-be39-deff553eb975 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.029480] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604f650e-78b2-44f4-a38f-f06ad37d012d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.068435] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179446MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1504.068930] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.069919] env[62525]: DEBUG nova.network.neutron [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1504.105419] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.105946] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.176220] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.199389] env[62525]: DEBUG nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1504.225816] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.244318] env[62525]: DEBUG nova.network.neutron [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Updating instance_info_cache with network_info: [{"id": "8fd85844-87de-4df5-a881-9ed796e8af51", "address": "fa:16:3e:2d:ea:1f", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fd85844-87", "ovs_interfaceid": "8fd85844-87de-4df5-a881-9ed796e8af51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.459332] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "1badb7d9-692a-445e-ad47-ebd6e19f8197" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.459600] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.488603] env[62525]: DEBUG nova.compute.utils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1504.491356] env[62525]: DEBUG nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 
tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1504.491546] env[62525]: DEBUG nova.network.neutron [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1504.543302] env[62525]: DEBUG nova.policy [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83bf79d024f345a9a8c02004f8cefbaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eab7fca262814290a975bf85badc9b71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1504.608904] env[62525]: DEBUG nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1504.728549] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.750894] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "refresh_cache-cafae62e-b001-4ee0-8e89-4da9c60cf488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.751388] env[62525]: DEBUG nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Instance network_info: |[{"id": "8fd85844-87de-4df5-a881-9ed796e8af51", "address": "fa:16:3e:2d:ea:1f", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", 
"external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fd85844-87", "ovs_interfaceid": "8fd85844-87de-4df5-a881-9ed796e8af51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1504.751947] env[62525]: DEBUG oslo_concurrency.lockutils [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] Acquired lock "refresh_cache-cafae62e-b001-4ee0-8e89-4da9c60cf488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.752185] env[62525]: DEBUG nova.network.neutron [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Refreshing network info cache for port 8fd85844-87de-4df5-a881-9ed796e8af51 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1504.753632] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:ea:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fd85844-87de-4df5-a881-9ed796e8af51', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1504.763963] env[62525]: DEBUG oslo.service.loopingcall [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.771487] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1504.772493] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22d6585d-e4d6-4b8e-a209-55665d207f29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.798916] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1504.798916] env[62525]: value = "task-1781356" [ 1504.798916] env[62525]: _type = "Task" [ 1504.798916] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.810255] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781356, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.913724] env[62525]: DEBUG nova.network.neutron [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Successfully created port: 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1504.962845] env[62525]: DEBUG nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1504.995273] env[62525]: DEBUG nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1505.079134] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabe2654-f96e-40f5-9856-dd1a2311d36e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.089373] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58021b7-37c6-49e3-bb55-d15f3ae2b8b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.133793] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ccd6c8-9d4f-437b-9ca0-68ba42ccfc4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.143359] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5826b9-a5d9-41c5-85a9-7695ee9ff521 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.149161] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.160239] env[62525]: DEBUG nova.compute.provider_tree [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1505.200605] env[62525]: DEBUG nova.network.neutron [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Updated VIF entry in instance network info cache for port 8fd85844-87de-4df5-a881-9ed796e8af51. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1505.201245] env[62525]: DEBUG nova.network.neutron [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Updating instance_info_cache with network_info: [{"id": "8fd85844-87de-4df5-a881-9ed796e8af51", "address": "fa:16:3e:2d:ea:1f", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fd85844-87", "ovs_interfaceid": "8fd85844-87de-4df5-a881-9ed796e8af51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.314420] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781356, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.485982] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.663955] env[62525]: DEBUG nova.scheduler.client.report [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1505.704106] env[62525]: DEBUG oslo_concurrency.lockutils [req-5e002355-ea6f-43cf-9562-5b5e6dd643fb req-5533d348-01a0-4a25-964a-d40358d651c1 service nova] Releasing lock "refresh_cache-cafae62e-b001-4ee0-8e89-4da9c60cf488" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.814357] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781356, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.004641] env[62525]: DEBUG nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1506.028106] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1506.028447] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1506.029414] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1506.029667] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1506.029851] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1506.030047] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1506.030321] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1506.030515] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1506.030755] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1506.030985] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1506.031228] env[62525]: DEBUG nova.virt.hardware [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1506.032220] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0483e659-2c0a-45ae-9569-8aa63b506ba4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.040739] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad71903-3e17-4486-9481-5968bb70ec64 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.169920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.172435] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.818s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.172435] env[62525]: DEBUG nova.objects.instance [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'resources' on Instance uuid 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1506.194158] env[62525]: INFO nova.scheduler.client.report [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Deleted allocations for instance 7c8474fd-2ca5-4ecc-b2e6-4248baafd639 [ 1506.312023] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781356, 'name': CreateVM_Task, 
'duration_secs': 1.504147} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.312217] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1506.312927] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.313108] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.313478] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1506.314141] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-735d13f5-b729-423f-8172-2d544489b22d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.319653] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1506.319653] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523d6ead-bde2-16a9-0b03-1e5a035f454c" [ 1506.319653] env[62525]: _type = "Task" [ 1506.319653] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.329151] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523d6ead-bde2-16a9-0b03-1e5a035f454c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.498948] env[62525]: DEBUG nova.compute.manager [req-e0113f5c-e9a1-4c2a-884f-11f4dc289d48 req-697d61ce-ec2c-4349-8edc-ca75dc8a977a service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Received event network-vif-plugged-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1506.499360] env[62525]: DEBUG oslo_concurrency.lockutils [req-e0113f5c-e9a1-4c2a-884f-11f4dc289d48 req-697d61ce-ec2c-4349-8edc-ca75dc8a977a service nova] Acquiring lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.499429] env[62525]: DEBUG oslo_concurrency.lockutils [req-e0113f5c-e9a1-4c2a-884f-11f4dc289d48 req-697d61ce-ec2c-4349-8edc-ca75dc8a977a service nova] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.499561] env[62525]: DEBUG oslo_concurrency.lockutils [req-e0113f5c-e9a1-4c2a-884f-11f4dc289d48 req-697d61ce-ec2c-4349-8edc-ca75dc8a977a service nova] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.499757] env[62525]: DEBUG nova.compute.manager [req-e0113f5c-e9a1-4c2a-884f-11f4dc289d48 req-697d61ce-ec2c-4349-8edc-ca75dc8a977a service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] No waiting events found dispatching network-vif-plugged-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1506.499891] env[62525]: WARNING nova.compute.manager [req-e0113f5c-e9a1-4c2a-884f-11f4dc289d48 req-697d61ce-ec2c-4349-8edc-ca75dc8a977a service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Received unexpected event network-vif-plugged-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f for instance with vm_state building and task_state spawning. 
[ 1506.639797] env[62525]: DEBUG nova.network.neutron [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Successfully updated port: 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1506.702113] env[62525]: DEBUG oslo_concurrency.lockutils [None req-323b8b00-0d04-4ad2-8ca7-b4e30c6f23ac tempest-TenantUsagesTestJSON-1168492701 tempest-TenantUsagesTestJSON-1168492701-project-member] Lock "7c8474fd-2ca5-4ecc-b2e6-4248baafd639" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.826s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.835339] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523d6ead-bde2-16a9-0b03-1e5a035f454c, 'name': SearchDatastore_Task, 'duration_secs': 0.016795} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.835935] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.835935] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1506.836160] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.836305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.836496] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.836807] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9729655-8685-410e-a14e-e3e55f568aac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.850352] 
env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.850612] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1506.851906] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99d4ccd2-f43c-422c-b6a7-746d94ebb1ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.864742] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1506.864742] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5263263c-bb7f-8569-1f10-2b2fe49780f5" [ 1506.864742] env[62525]: _type = "Task" [ 1506.864742] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.875723] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5263263c-bb7f-8569-1f10-2b2fe49780f5, 'name': SearchDatastore_Task, 'duration_secs': 0.011161} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.880114] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-176fe265-0e2c-4738-893c-13e402417be5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.888135] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1506.888135] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5289e78a-56a8-7e9f-0b7c-ace4d6e48519" [ 1506.888135] env[62525]: _type = "Task" [ 1506.888135] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.899821] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5289e78a-56a8-7e9f-0b7c-ace4d6e48519, 'name': SearchDatastore_Task, 'duration_secs': 0.010576} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.900091] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.900387] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] cafae62e-b001-4ee0-8e89-4da9c60cf488/cafae62e-b001-4ee0-8e89-4da9c60cf488.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1506.901054] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c87e80a-2dab-4adf-9a00-f5d9b8874547 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.910334] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1506.910334] env[62525]: value = "task-1781357" [ 1506.910334] env[62525]: _type = "Task" [ 1506.910334] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.919936] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781357, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.077532] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f7f5b4-079c-44af-93b5-3f05ba74276f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.088526] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6659c42-40c6-4b1a-92e0-f63d10a34470 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.127566] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc23942-a6d3-43da-a166-33ae0121636b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.136977] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fe7d35-792f-46c2-818e-3626cb5921cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.142269] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.142349] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.142587] env[62525]: DEBUG nova.network.neutron [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1507.161730] env[62525]: DEBUG nova.compute.provider_tree [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.420975] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781357, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.667966] env[62525]: DEBUG nova.scheduler.client.report [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1507.683201] env[62525]: DEBUG nova.network.neutron [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1507.827739] env[62525]: DEBUG nova.network.neutron [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [{"id": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "address": "fa:16:3e:43:49:a1", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d67f7f-d8", "ovs_interfaceid": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.924217] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781357, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525314} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.924570] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] cafae62e-b001-4ee0-8e89-4da9c60cf488/cafae62e-b001-4ee0-8e89-4da9c60cf488.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1507.924893] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1507.925238] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b877892d-3b53-4c6d-b558-d242786f2d83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.934074] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1507.934074] env[62525]: value = "task-1781358" [ 1507.934074] env[62525]: _type = "Task" [ 1507.934074] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.945840] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781358, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.172862] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.176450] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.602s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.179640] env[62525]: INFO nova.compute.claims [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1508.202508] env[62525]: INFO nova.scheduler.client.report [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted allocations for instance 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3 [ 1508.330137] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.330593] env[62525]: DEBUG nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Instance network_info: |[{"id": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "address": "fa:16:3e:43:49:a1", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d67f7f-d8", "ovs_interfaceid": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1508.331332] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None 
req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:49:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52d67f7f-d861-4c0b-bfa2-c2f41085fb6f', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1508.339031] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating folder: Project (eab7fca262814290a975bf85badc9b71). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1508.339274] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3922e7f4-edaf-40cc-a548-ef6189c1ea10 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.354688] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Created folder: Project (eab7fca262814290a975bf85badc9b71) in parent group-v369553. [ 1508.354850] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating folder: Instances. Parent ref: group-v369718. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1508.355191] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ec018e0-415b-443c-8925-bff854661b86 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.366322] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Created folder: Instances in parent group-v369718. [ 1508.366454] env[62525]: DEBUG oslo.service.loopingcall [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1508.366647] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1508.366934] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9df873c7-5365-4ed3-8b8a-e4b1a83c63dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.389463] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1508.389463] env[62525]: value = "task-1781361" [ 1508.389463] env[62525]: _type = "Task" [ 1508.389463] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.398842] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781361, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.444974] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087201} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.445346] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1508.446264] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f51052b-d071-4162-a61c-31df243165fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.471875] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] cafae62e-b001-4ee0-8e89-4da9c60cf488/cafae62e-b001-4ee0-8e89-4da9c60cf488.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1508.472564] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a147ecdb-6246-4a2b-ad7b-f438c7396559 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.494557] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1508.494557] env[62525]: value = "task-1781362" [ 1508.494557] env[62525]: _type = "Task" [ 1508.494557] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.504283] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781362, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.529427] env[62525]: DEBUG nova.compute.manager [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Received event network-changed-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1508.529700] env[62525]: DEBUG nova.compute.manager [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Refreshing instance network info cache due to event network-changed-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1508.529841] env[62525]: DEBUG oslo_concurrency.lockutils [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] Acquiring lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.530020] env[62525]: DEBUG oslo_concurrency.lockutils [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] Acquired lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.530418] env[62525]: DEBUG nova.network.neutron [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Refreshing network info cache for port 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1508.713904] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c90d6d2a-70d7-471f-865c-4addd6be5185 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "8adc8b4b-1087-4a11-9ee8-d897f1aa83f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.157s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.904253] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781361, 'name': CreateVM_Task, 'duration_secs': 0.491541} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.904253] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1508.905170] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.905397] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.905924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1508.906533] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd4529bd-b684-42d2-9f16-8d46e10b51f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.913875] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1508.913875] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c34f46-19ff-094a-25bf-595629f9299c" [ 1508.913875] env[62525]: _type = "Task" [ 1508.913875] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.927092] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c34f46-19ff-094a-25bf-595629f9299c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.007282] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781362, 'name': ReconfigVM_Task, 'duration_secs': 0.477563} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.007282] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Reconfigured VM instance instance-00000039 to attach disk [datastore1] cafae62e-b001-4ee0-8e89-4da9c60cf488/cafae62e-b001-4ee0-8e89-4da9c60cf488.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1509.007801] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-857a39cf-b303-411a-a5e0-b26b3c6a5cd0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.016704] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1509.016704] env[62525]: value = "task-1781363" [ 1509.016704] env[62525]: _type = "Task" [ 1509.016704] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.026494] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781363, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.429895] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c34f46-19ff-094a-25bf-595629f9299c, 'name': SearchDatastore_Task, 'duration_secs': 0.011436} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.432825] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.433180] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1509.433590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.433803] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.434067] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1509.435565] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f7981ef-8278-4072-a307-3e3e8f7d15d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.445718] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1509.445879] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1509.449481] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d18ad7d2-3ee8-4d49-a2aa-e4eb15a8e0d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.457797] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1509.457797] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5222e5d4-fd33-c7af-9318-da72cd37466b" [ 1509.457797] env[62525]: _type = "Task" [ 1509.457797] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.470400] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5222e5d4-fd33-c7af-9318-da72cd37466b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.470400] env[62525]: DEBUG nova.network.neutron [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updated VIF entry in instance network info cache for port 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1509.470400] env[62525]: DEBUG nova.network.neutron [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [{"id": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "address": "fa:16:3e:43:49:a1", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d67f7f-d8", "ovs_interfaceid": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.529924] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781363, 'name': Rename_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.612346] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7831cdf4-d956-4bbf-b229-d0ea4187214a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.622432] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47dba21-32bc-437f-9f6d-d6b97191d818 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.653426] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf8528f-9aa1-4940-ab88-0c27c6061da7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.661830] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ebafe4-adb3-4e5d-a106-f499810afec7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.680106] env[62525]: DEBUG nova.compute.provider_tree [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.969606] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5222e5d4-fd33-c7af-9318-da72cd37466b, 'name': SearchDatastore_Task, 'duration_secs': 0.010865} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.970492] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-874982bf-366b-4ed0-87d5-d9992e1c8176 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.973591] env[62525]: DEBUG oslo_concurrency.lockutils [req-6507252e-04ee-408e-bb4c-eea6fdaba8e7 req-4c0bf5a0-b7e0-4429-897a-b48fe55fc6fb service nova] Releasing lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.978047] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1509.978047] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52520aa3-74ce-0115-9633-960cc04c291a" [ 1509.978047] env[62525]: _type = "Task" [ 1509.978047] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.988621] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52520aa3-74ce-0115-9633-960cc04c291a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.028332] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781363, 'name': Rename_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.183650] env[62525]: DEBUG nova.scheduler.client.report [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1510.489819] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52520aa3-74ce-0115-9633-960cc04c291a, 'name': SearchDatastore_Task, 'duration_secs': 0.01433} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.490327] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.490441] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394/8d8b8a9e-c9ad-42d3-8a71-9f6e62206394.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1510.490766] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b238e57-6009-4832-8684-70a955963e7c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.498661] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1510.498661] env[62525]: value = "task-1781364" [ 1510.498661] env[62525]: _type = "Task" [ 1510.498661] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.508013] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.528162] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781363, 'name': Rename_Task, 'duration_secs': 1.200284} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.528466] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1510.528734] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db4d6e23-6027-4158-b5f8-854a70ca27be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.537139] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1510.537139] env[62525]: value = "task-1781365" [ 1510.537139] env[62525]: _type = "Task" [ 1510.537139] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.546190] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781365, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.689134] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.690091] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.239s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.690434] env[62525]: DEBUG nova.objects.instance [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'resources' on Instance uuid d8c7d102-46e6-40fe-a864-a72590af4982 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1511.009577] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477001} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.009874] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394/8d8b8a9e-c9ad-42d3-8a71-9f6e62206394.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1511.010100] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1511.010394] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe39a3f6-d865-45d8-8cee-e2889273f23b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.017313] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1511.017313] env[62525]: value = "task-1781366" [ 1511.017313] env[62525]: _type = "Task" [ 1511.017313] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.025570] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781366, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.047600] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781365, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.072757] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.073101] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "6be49426-ddda-461e-908f-593c0904b129" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.192669] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "16988b75-35eb-4e69-969f-4811fd255f57" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.192970] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "16988b75-35eb-4e69-969f-4811fd255f57" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.505276] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e84a1a8-db3f-4db8-a87e-aa5cde3b9946 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.513260] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54071dff-6d90-409f-ab45-71d175a80422 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.550056] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daffd166-8b62-400e-bfc0-65331690b264 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.555412] env[62525]: DEBUG 
oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781366, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091085} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.556344] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1511.557194] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b337c0-f62b-4840-8ba0-341bf5fab529 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.564741] env[62525]: DEBUG oslo_vmware.api [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781365, 'name': PowerOnVM_Task, 'duration_secs': 0.641712} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.566232] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e41912-7b23-4807-baf0-48b088272b65 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.570140] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1511.570548] env[62525]: INFO nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Took 10.34 seconds to spawn the instance on the hypervisor. [ 1511.570644] env[62525]: DEBUG nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1511.580008] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9b4d71-659c-4609-9aed-e7b37b6b68f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.582713] env[62525]: DEBUG nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1511.594022] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394/8d8b8a9e-c9ad-42d3-8a71-9f6e62206394.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1511.595338] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc374bf1-5ac7-49dd-b1dd-9710bab152f8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.619359] env[62525]: DEBUG nova.compute.provider_tree [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.626212] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1511.626212] env[62525]: value = "task-1781367" [ 1511.626212] env[62525]: _type = "Task" [ 1511.626212] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.634928] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781367, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.698138] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "16988b75-35eb-4e69-969f-4811fd255f57" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.698659] env[62525]: DEBUG nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1512.112759] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.126078] env[62525]: DEBUG nova.scheduler.client.report [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1512.138704] env[62525]: INFO nova.compute.manager [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Took 41.09 seconds to build instance. [ 1512.144192] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.204319] env[62525]: DEBUG nova.compute.utils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1512.207575] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b25330-6662-5cca-0f8e-b044226544c2/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1512.207897] env[62525]: DEBUG nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1512.208086] env[62525]: DEBUG nova.network.neutron [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1512.210370] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a55547-3510-4eed-b835-963186ff46b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.217107] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b25330-6662-5cca-0f8e-b044226544c2/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1512.217234] env[62525]: ERROR oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b25330-6662-5cca-0f8e-b044226544c2/disk-0.vmdk due to incomplete transfer. [ 1512.217980] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-138b9e7e-5dc8-450a-a11c-e17c26d477c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.226528] env[62525]: DEBUG oslo_vmware.rw_handles [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b25330-6662-5cca-0f8e-b044226544c2/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1512.226779] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Uploaded image bd6d630f-1173-4f94-9676-057d6f46f208 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1512.229311] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1512.229868] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-28796490-a2bf-4853-9650-4b2ca7637f6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.235755] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1512.235755] env[62525]: value = "task-1781368" [ 1512.235755] env[62525]: _type = "Task" [ 1512.235755] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.245354] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781368, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.261237] env[62525]: DEBUG nova.policy [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0d57471762845309e849e7e5825d755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fb8ad224b38469f92dc2be1417c7d20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1512.528288] env[62525]: DEBUG nova.network.neutron [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Successfully created port: 9ca7e6dd-06d1-4821-98e1-44ca72b410f2 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1512.634685] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.945s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.640952] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.482s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.641234] env[62525]: DEBUG nova.objects.instance [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1512.644602] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76c51251-d52f-477a-8647-33c0751df30a tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "cafae62e-b001-4ee0-8e89-4da9c60cf488" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.621s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.650892] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781367, 'name': ReconfigVM_Task, 'duration_secs': 0.758369} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.651034] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394/8d8b8a9e-c9ad-42d3-8a71-9f6e62206394.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1512.651710] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c7a6408-5029-44bc-80fc-c00e2945250e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.660176] env[62525]: INFO nova.scheduler.client.report [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted allocations for instance d8c7d102-46e6-40fe-a864-a72590af4982 [ 1512.662327] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1512.662327] env[62525]: value = "task-1781369" [ 1512.662327] env[62525]: _type = "Task" [ 1512.662327] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.675716] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781369, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.715236] env[62525]: DEBUG nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1512.748738] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781368, 'name': Destroy_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.174626] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781369, 'name': Rename_Task, 'duration_secs': 0.222228} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.174626] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5158c124-872b-48b5-8a2c-c48f6a754551 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "d8c7d102-46e6-40fe-a864-a72590af4982" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.739s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.175407] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1513.175800] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-326fa900-3b4b-400d-9fdb-07acf656dfca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.183363] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1513.183363] env[62525]: value = "task-1781370" [ 1513.183363] env[62525]: _type = "Task" [ 1513.183363] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.200454] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781370, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.247591] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781368, 'name': Destroy_Task, 'duration_secs': 0.749445} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.247800] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Destroyed the VM [ 1513.248045] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1513.248294] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-63716b4e-35eb-4382-81d2-7e231616a8e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.256431] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1513.256431] env[62525]: value = "task-1781371" [ 1513.256431] env[62525]: _type = "Task" [ 1513.256431] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.267780] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781371, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.649961] env[62525]: DEBUG oslo_concurrency.lockutils [None req-260145ae-5be1-4ee7-b6de-3b8d95cabd07 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.651159] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.787s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.652696] env[62525]: INFO nova.compute.claims [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1513.694880] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781370, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.726542] env[62525]: DEBUG nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1513.748058] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1513.748339] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1513.748507] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.748707] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1513.748855] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1513.749017] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1513.749244] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1513.749432] 
env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1513.749612] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1513.749775] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1513.749955] env[62525]: DEBUG nova.virt.hardware [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1513.750918] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725abc73-97d0-4927-b62a-fda6e9becd45 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.762468] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a09a07f-17e1-4d06-8adf-ba984ef672bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.767104] env[62525]: DEBUG nova.compute.manager [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1513.767855] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b16659d-cff9-43f6-8dc0-b93d731dd812 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.775559] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781371, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.977217] env[62525]: DEBUG nova.compute.manager [req-e9c61fad-804d-4296-8114-329fdb4bcc7f req-1ed5cf30-93f3-491e-bd8a-4a5d68c8cb45 service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Received event network-vif-plugged-9ca7e6dd-06d1-4821-98e1-44ca72b410f2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1513.977422] env[62525]: DEBUG oslo_concurrency.lockutils [req-e9c61fad-804d-4296-8114-329fdb4bcc7f req-1ed5cf30-93f3-491e-bd8a-4a5d68c8cb45 service nova] Acquiring lock "dfa4b57e-6219-42eb-b257-263124f9a980-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.977757] env[62525]: DEBUG oslo_concurrency.lockutils [req-e9c61fad-804d-4296-8114-329fdb4bcc7f req-1ed5cf30-93f3-491e-bd8a-4a5d68c8cb45 service nova] Lock "dfa4b57e-6219-42eb-b257-263124f9a980-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.978045] env[62525]: DEBUG oslo_concurrency.lockutils [req-e9c61fad-804d-4296-8114-329fdb4bcc7f req-1ed5cf30-93f3-491e-bd8a-4a5d68c8cb45 service nova] Lock "dfa4b57e-6219-42eb-b257-263124f9a980-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.978321] env[62525]: DEBUG nova.compute.manager [req-e9c61fad-804d-4296-8114-329fdb4bcc7f req-1ed5cf30-93f3-491e-bd8a-4a5d68c8cb45 service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] No waiting events found dispatching network-vif-plugged-9ca7e6dd-06d1-4821-98e1-44ca72b410f2 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1513.978608] env[62525]: WARNING nova.compute.manager [req-e9c61fad-804d-4296-8114-329fdb4bcc7f req-1ed5cf30-93f3-491e-bd8a-4a5d68c8cb45 service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Received unexpected event network-vif-plugged-9ca7e6dd-06d1-4821-98e1-44ca72b410f2 for instance with vm_state building and task_state spawning. 
[ 1514.000270] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "81fbb354-21f2-43f0-8aa3-e80e10235326" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.000730] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.083788] env[62525]: DEBUG nova.network.neutron [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Successfully updated port: 9ca7e6dd-06d1-4821-98e1-44ca72b410f2 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1514.195737] env[62525]: DEBUG oslo_vmware.api [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781370, 'name': PowerOnVM_Task, 'duration_secs': 0.880215} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.196145] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1514.197026] env[62525]: INFO nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Took 8.19 seconds to spawn the instance on the hypervisor. [ 1514.197026] env[62525]: DEBUG nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1514.197164] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5859be92-673d-4107-8514-edb6cb880ade {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.269635] env[62525]: DEBUG oslo_vmware.api [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781371, 'name': RemoveSnapshot_Task, 'duration_secs': 0.821441} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.269849] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1514.270093] env[62525]: INFO nova.compute.manager [None req-d121580f-1a56-4ed9-b53d-7c8ce8256f2c tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Took 15.98 seconds to snapshot the instance on the hypervisor. [ 1514.290047] env[62525]: INFO nova.compute.manager [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] instance snapshotting [ 1514.292849] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd53604d-424d-4cd4-a35b-fbe58e8ea7dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.313564] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd219a1-0067-4f3c-833e-4712910ae3b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.504397] env[62525]: DEBUG nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1514.589481] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "refresh_cache-dfa4b57e-6219-42eb-b257-263124f9a980" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.589658] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquired lock "refresh_cache-dfa4b57e-6219-42eb-b257-263124f9a980" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.589808] env[62525]: DEBUG nova.network.neutron [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1514.717504] env[62525]: INFO nova.compute.manager [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Took 42.55 seconds to build instance. 
[ 1514.824450] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1514.824708] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4f822324-7c31-4185-abd3-62d35b97e1bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.840520] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1514.840520] env[62525]: value = "task-1781372" [ 1514.840520] env[62525]: _type = "Task" [ 1514.840520] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.851715] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781372, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.029054] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.035278] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e8ea6c-2fed-4d92-aea5-579acd3caa60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.043235] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d6ef20-d361-4e95-82c6-807c16770872 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.081239] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004522c5-9d0b-4b37-b445-1685bcb7be59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.088792] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69e291e-8208-4789-b058-d5de185ba6ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.105199] env[62525]: DEBUG nova.compute.provider_tree [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1515.141469] env[62525]: DEBUG nova.network.neutron [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: 
dfa4b57e-6219-42eb-b257-263124f9a980] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1515.219392] env[62525]: DEBUG oslo_concurrency.lockutils [None req-755620ab-5c9d-4f8c-bd26-ef7bc0b1b894 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.243s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.352744] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781372, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.563652] env[62525]: DEBUG nova.network.neutron [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Updating instance_info_cache with network_info: [{"id": "9ca7e6dd-06d1-4821-98e1-44ca72b410f2", "address": "fa:16:3e:5d:2c:de", "network": {"id": "2756041e-f2a1-4c5a-8799-acd50ea2151d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-275613397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb8ad224b38469f92dc2be1417c7d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ca7e6dd-06", "ovs_interfaceid": "9ca7e6dd-06d1-4821-98e1-44ca72b410f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.607942] env[62525]: DEBUG nova.scheduler.client.report [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1515.853862] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781372, 'name': 
CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.066435] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Releasing lock "refresh_cache-dfa4b57e-6219-42eb-b257-263124f9a980" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.066833] env[62525]: DEBUG nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Instance network_info: |[{"id": "9ca7e6dd-06d1-4821-98e1-44ca72b410f2", "address": "fa:16:3e:5d:2c:de", "network": {"id": "2756041e-f2a1-4c5a-8799-acd50ea2151d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-275613397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb8ad224b38469f92dc2be1417c7d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ca7e6dd-06", "ovs_interfaceid": "9ca7e6dd-06d1-4821-98e1-44ca72b410f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1516.067306] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:2c:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ca7e6dd-06d1-4821-98e1-44ca72b410f2', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1516.075515] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Creating folder: Project (2fb8ad224b38469f92dc2be1417c7d20). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1516.079069] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fb1db41-13c7-4b96-8a66-05a1363e8655 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.091201] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Created folder: Project (2fb8ad224b38469f92dc2be1417c7d20) in parent group-v369553. [ 1516.091434] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Creating folder: Instances. Parent ref: group-v369721. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1516.091701] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-471c8e34-02ea-4427-a9da-00bef985483b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.100652] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Created folder: Instances in parent group-v369721. [ 1516.100907] env[62525]: DEBUG oslo.service.loopingcall [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.101122] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1516.101323] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3a3c34d-4da8-4deb-b6aa-0fe8f57dbf38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.118707] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.119282] env[62525]: DEBUG nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1516.125021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.823s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.125021] env[62525]: INFO nova.compute.claims [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1516.132141] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1516.132141] env[62525]: value = "task-1781375" [ 1516.132141] env[62525]: _type = "Task" [ 1516.132141] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.143113] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781375, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.313318] env[62525]: DEBUG nova.compute.manager [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Received event network-changed-9ca7e6dd-06d1-4821-98e1-44ca72b410f2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1516.313604] env[62525]: DEBUG nova.compute.manager [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Refreshing instance network info cache due to event network-changed-9ca7e6dd-06d1-4821-98e1-44ca72b410f2. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1516.313723] env[62525]: DEBUG oslo_concurrency.lockutils [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] Acquiring lock "refresh_cache-dfa4b57e-6219-42eb-b257-263124f9a980" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.313858] env[62525]: DEBUG oslo_concurrency.lockutils [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] Acquired lock "refresh_cache-dfa4b57e-6219-42eb-b257-263124f9a980" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.314079] env[62525]: DEBUG nova.network.neutron [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Refreshing network info cache for port 9ca7e6dd-06d1-4821-98e1-44ca72b410f2 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1516.352561] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781372, 'name': CreateSnapshot_Task, 'duration_secs': 1.448109} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.352999] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1516.353555] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a01d551-b708-4733-b384-f890bb954aff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.629058] env[62525]: DEBUG nova.compute.utils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1516.632306] env[62525]: DEBUG nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1516.632489] env[62525]: DEBUG nova.network.neutron [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1516.646686] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781375, 'name': CreateVM_Task, 'duration_secs': 0.447323} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.646686] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1516.647223] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.647467] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.647864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1516.648520] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8011243-61ae-42f7-bcd1-17294460d545 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.652858] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1516.652858] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527ad7d7-c525-5904-53ba-879a700d3af1" [ 1516.652858] env[62525]: _type = "Task" [ 1516.652858] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.661268] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ad7d7-c525-5904-53ba-879a700d3af1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.677883] env[62525]: DEBUG nova.policy [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '772ef709f1574e55a426ecaf5abc2d7b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e3f3b0c58ed4bffba386aad34b5ae37', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1516.689088] env[62525]: DEBUG nova.compute.manager [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1516.689994] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a452064-6193-4aec-945a-99e16457c813 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.870564] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1516.870875] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1359a84c-5282-4d0a-a2af-0e4d8fc623ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.879594] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1516.879594] env[62525]: value = "task-1781376" [ 1516.879594] env[62525]: _type = "Task" [ 1516.879594] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.888401] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781376, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.050029] env[62525]: DEBUG nova.network.neutron [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Successfully created port: 0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1517.129951] env[62525]: DEBUG nova.network.neutron [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Updated VIF entry in instance network info cache for port 9ca7e6dd-06d1-4821-98e1-44ca72b410f2. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1517.130350] env[62525]: DEBUG nova.network.neutron [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Updating instance_info_cache with network_info: [{"id": "9ca7e6dd-06d1-4821-98e1-44ca72b410f2", "address": "fa:16:3e:5d:2c:de", "network": {"id": "2756041e-f2a1-4c5a-8799-acd50ea2151d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-275613397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb8ad224b38469f92dc2be1417c7d20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ca7e6dd-06", "ovs_interfaceid": "9ca7e6dd-06d1-4821-98e1-44ca72b410f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.132475] env[62525]: DEBUG nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1517.172348] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ad7d7-c525-5904-53ba-879a700d3af1, 'name': SearchDatastore_Task, 'duration_secs': 0.011404} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.174217] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.175792] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1517.175792] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.175792] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.175792] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1517.175792] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ee2a0e2-baf0-4c23-89ad-96be1907d4eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.184962] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1517.185159] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1517.187366] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5830162-d4ab-4eff-8ad1-91c9fadea6e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.197336] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1517.197336] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528c23a0-6581-b109-8ea7-65c1f82b1dae" [ 1517.197336] env[62525]: _type = "Task" [ 1517.197336] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.201130] env[62525]: INFO nova.compute.manager [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] instance snapshotting [ 1517.204967] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e062f7-76cb-4ee3-8bed-ef60d976d6c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.214220] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528c23a0-6581-b109-8ea7-65c1f82b1dae, 'name': SearchDatastore_Task, 'duration_secs': 0.010078} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.215477] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-060ef3b4-b7b0-413c-92be-a5c450e662f9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.234214] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a0b337-603b-49c4-9d22-dfc5e23bc3ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.238247] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1517.238247] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5268e113-20a8-bc72-ec41-0a0a4be4b185" [ 1517.238247] env[62525]: _type = "Task" [ 1517.238247] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.251086] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5268e113-20a8-bc72-ec41-0a0a4be4b185, 'name': SearchDatastore_Task, 'duration_secs': 0.010112} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.253667] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.253919] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] dfa4b57e-6219-42eb-b257-263124f9a980/dfa4b57e-6219-42eb-b257-263124f9a980.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1517.254794] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ab99a51-7c0b-49e1-83aa-9db297aa4a14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.261789] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1517.261789] env[62525]: value = "task-1781377" [ 1517.261789] env[62525]: _type = "Task" [ 1517.261789] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.275509] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.389627] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781376, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.633029] env[62525]: DEBUG oslo_concurrency.lockutils [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] Releasing lock "refresh_cache-dfa4b57e-6219-42eb-b257-263124f9a980" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.633175] env[62525]: DEBUG nova.compute.manager [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Received event network-changed-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1517.633312] env[62525]: DEBUG nova.compute.manager [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Refreshing instance network info cache due to event network-changed-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1517.633525] env[62525]: DEBUG oslo_concurrency.lockutils [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] Acquiring lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.633671] env[62525]: DEBUG oslo_concurrency.lockutils [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] Acquired lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.633832] env[62525]: DEBUG nova.network.neutron [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Refreshing network info cache for port 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1517.657781] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186be602-60b9-47dc-a743-4145d4adb09b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.670069] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf55445-21c8-4953-9837-ded755973f7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.704508] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ff0a87-3768-4ede-8ebe-35402b4a38eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.714880] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cd9f81-04b7-4155-b7a0-f94443c463a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.736818] env[62525]: DEBUG nova.compute.provider_tree [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.747856] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1517.749017] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-efff0a2e-1227-465c-b205-87270a96993c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.756083] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1517.756083] env[62525]: value = "task-1781378" [ 1517.756083] env[62525]: _type = 
"Task" [ 1517.756083] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.765853] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781378, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.775050] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505209} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.775369] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] dfa4b57e-6219-42eb-b257-263124f9a980/dfa4b57e-6219-42eb-b257-263124f9a980.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1517.775657] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1517.776010] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e308a570-996e-45ae-8951-5b09640c9ea0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.782550] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1517.782550] env[62525]: value = "task-1781379" [ 1517.782550] env[62525]: _type = "Task" [ 1517.782550] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.790605] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781379, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.891653] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781376, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.146568] env[62525]: DEBUG nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1518.173031] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1518.173031] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1518.174108] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1518.174366] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1518.174535] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1518.174689] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1518.174899] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1518.175072] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1518.175244] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1518.175410] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1518.175582] env[62525]: DEBUG nova.virt.hardware [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1518.176739] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76119d40-5527-47a8-8a4c-380ca23a2062 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.187085] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc84fce-521a-4732-92eb-3648185bd820 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.242775] env[62525]: DEBUG nova.scheduler.client.report [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1518.266071] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781378, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.292858] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068161} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.293161] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1518.293994] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c9d867-1106-45d8-baa9-7c3b90297e5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.316613] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] dfa4b57e-6219-42eb-b257-263124f9a980/dfa4b57e-6219-42eb-b257-263124f9a980.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1518.316877] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ae736b8-0eba-43f0-bdc7-0649a572324a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.338755] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1518.338755] env[62525]: value = "task-1781380" [ 1518.338755] env[62525]: _type = "Task" [ 1518.338755] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.349488] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781380, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.390462] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781376, 'name': CloneVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.456113] env[62525]: DEBUG nova.network.neutron [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updated VIF entry in instance network info cache for port 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1518.456509] env[62525]: DEBUG nova.network.neutron [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [{"id": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "address": "fa:16:3e:43:49:a1", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d67f7f-d8", "ovs_interfaceid": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.560119] env[62525]: DEBUG nova.compute.manager [req-a26b8c0a-b073-4701-9e56-2e0690f19768 req-5d516600-3eba-478b-9fff-445f03e46a47 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received event network-vif-plugged-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1518.560329] env[62525]: DEBUG oslo_concurrency.lockutils [req-a26b8c0a-b073-4701-9e56-2e0690f19768 req-5d516600-3eba-478b-9fff-445f03e46a47 service nova] Acquiring lock "24d38b8e-c48b-4562-817e-7ae57658fb1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.560600] env[62525]: DEBUG oslo_concurrency.lockutils [req-a26b8c0a-b073-4701-9e56-2e0690f19768 req-5d516600-3eba-478b-9fff-445f03e46a47 service nova] Lock "24d38b8e-c48b-4562-817e-7ae57658fb1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.560736] env[62525]: DEBUG oslo_concurrency.lockutils [req-a26b8c0a-b073-4701-9e56-2e0690f19768 req-5d516600-3eba-478b-9fff-445f03e46a47 service nova] Lock "24d38b8e-c48b-4562-817e-7ae57658fb1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.560915] env[62525]: DEBUG nova.compute.manager [req-a26b8c0a-b073-4701-9e56-2e0690f19768 req-5d516600-3eba-478b-9fff-445f03e46a47 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] No waiting events found dispatching 
network-vif-plugged-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1518.561243] env[62525]: WARNING nova.compute.manager [req-a26b8c0a-b073-4701-9e56-2e0690f19768 req-5d516600-3eba-478b-9fff-445f03e46a47 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received unexpected event network-vif-plugged-0a6eef35-6265-42d0-b939-85c1984339a4 for instance with vm_state building and task_state spawning. [ 1518.651443] env[62525]: DEBUG nova.network.neutron [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Successfully updated port: 0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1518.745136] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.745566] env[62525]: DEBUG nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1518.748437] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.124s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.748658] env[62525]: DEBUG nova.objects.instance [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1518.765615] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781378, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.850306] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781380, 'name': ReconfigVM_Task, 'duration_secs': 0.27927} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.850593] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Reconfigured VM instance instance-0000003b to attach disk [datastore1] dfa4b57e-6219-42eb-b257-263124f9a980/dfa4b57e-6219-42eb-b257-263124f9a980.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1518.851498] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90275eaa-ee37-4615-ba9f-3b9d6488703d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.857631] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1518.857631] env[62525]: value = "task-1781381" [ 1518.857631] env[62525]: _type = "Task" [ 1518.857631] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.867322] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781381, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.890389] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781376, 'name': CloneVM_Task, 'duration_secs': 1.542918} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.890648] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Created linked-clone VM from snapshot [ 1518.891376] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9136f9d-30af-48bd-a16d-5371d121112a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.898423] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Uploading image f384cb86-ee9a-480a-89e0-d09d86894f5f {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1518.918954] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1518.918954] env[62525]: value = "vm-369725" [ 1518.918954] env[62525]: _type = "VirtualMachine" [ 1518.918954] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1518.919227] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9d6e5e9c-293c-4b09-b3d5-53644019545d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.925597] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease: (returnval){ [ 1518.925597] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52661a6e-9fb2-f062-b057-fda8bbb5a86f" [ 1518.925597] env[62525]: _type = "HttpNfcLease" [ 1518.925597] env[62525]: } obtained for exporting VM: (result){ [ 1518.925597] env[62525]: value = "vm-369725" [ 1518.925597] env[62525]: _type = "VirtualMachine" [ 1518.925597] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1518.925853] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the lease: (returnval){ [ 1518.925853] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52661a6e-9fb2-f062-b057-fda8bbb5a86f" [ 1518.925853] env[62525]: _type = "HttpNfcLease" [ 1518.925853] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1518.931909] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1518.931909] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52661a6e-9fb2-f062-b057-fda8bbb5a86f" [ 1518.931909] env[62525]: _type = "HttpNfcLease" [ 1518.931909] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1518.959711] env[62525]: DEBUG oslo_concurrency.lockutils [req-48dd2e37-51e8-48e0-bbc9-cc94822d2429 req-fbdfecd2-52c1-4b05-9689-c0e51b6dacfe service nova] Releasing lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.154628] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.154800] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquired lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.154991] env[62525]: DEBUG nova.network.neutron [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1519.253881] env[62525]: DEBUG nova.compute.utils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1519.257894] env[62525]: DEBUG nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1519.258107] env[62525]: DEBUG nova.network.neutron [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1519.269482] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781378, 'name': CreateSnapshot_Task, 'duration_secs': 1.364139} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.269719] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1519.270450] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb9cb51-72b4-4a90-9bb3-ab533b260abd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.304206] env[62525]: DEBUG nova.policy [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fc90947a4cd4223a27cb5722752b5d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c584fb3541c4ea49269ecfcf063d746', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1519.368560] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781381, 'name': Rename_Task, 'duration_secs': 0.150592} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.368831] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1519.369085] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d00c582-ab4d-44b2-aac4-cb703b9b9dda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.375169] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1519.375169] env[62525]: value = "task-1781383" [ 1519.375169] env[62525]: _type = "Task" [ 1519.375169] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.383127] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781383, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.433801] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1519.433801] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52661a6e-9fb2-f062-b057-fda8bbb5a86f" [ 1519.433801] env[62525]: _type = "HttpNfcLease" [ 1519.433801] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1519.434157] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1519.434157] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52661a6e-9fb2-f062-b057-fda8bbb5a86f" [ 1519.434157] env[62525]: _type = "HttpNfcLease" [ 1519.434157] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1519.434830] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2591aa0d-cb6f-44eb-8749-9f7eda3ee031 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.443700] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204ffa8-377a-7a74-cf16-14104a99593d/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1519.443964] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204ffa8-377a-7a74-cf16-14104a99593d/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1519.554784] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0c6ed128-c518-4ae8-b215-08a572aa91c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.599857] env[62525]: DEBUG nova.network.neutron [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Successfully created port: cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1519.706823] env[62525]: DEBUG nova.network.neutron [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1519.761797] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eb732040-2c50-48d6-ace0-dab628ce5266 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.763391] env[62525]: DEBUG nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1519.766945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.804s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.766945] env[62525]: DEBUG nova.objects.instance [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lazy-loading 'resources' on Instance uuid 70313696-a9cc-499c-b9e6-329a71c4b915 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1519.789984] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1519.793810] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6cb07dde-74f8-472b-bd0a-6979f1473b53 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.804362] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1519.804362] env[62525]: value = "task-1781384" [ 1519.804362] env[62525]: _type = "Task" [ 1519.804362] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.814527] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781384, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.888981] env[62525]: DEBUG oslo_vmware.api [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781383, 'name': PowerOnVM_Task, 'duration_secs': 0.485515} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.889276] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1519.889522] env[62525]: INFO nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Took 6.16 seconds to spawn the instance on the hypervisor. [ 1519.889705] env[62525]: DEBUG nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1519.890650] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512e7ecb-7435-49e8-a159-7cf314884fbe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.936385] env[62525]: DEBUG nova.network.neutron [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.270109] env[62525]: INFO nova.virt.block_device [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Booting with volume 29e63c2d-fe5c-4937-8bbe-4a45dbe8493c at /dev/sda [ 1520.310612] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e30f0428-1046-4f60-8f3e-6ac182b9ea92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.320413] env[62525]: DEBUG 
oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781384, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.329075] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5dc91c-0f6d-4ae7-8dd5-c95eb563b368 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.378137] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a5cc904-d74a-4921-873e-e113615aa31b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.387442] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35cd8b8-fe84-448a-a7e1-91d39cc83aa2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.411794] env[62525]: INFO nova.compute.manager [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Took 37.85 seconds to build instance. [ 1520.437914] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b307846-e89f-481b-935f-4f3101f82d1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.441940] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Releasing lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.441940] env[62525]: DEBUG nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Instance network_info: |[{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1520.442430] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:e8:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a6eef35-6265-42d0-b939-85c1984339a4', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1520.450077] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Creating folder: Project (3e3f3b0c58ed4bffba386aad34b5ae37). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1520.451133] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2af2668a-4b51-48f4-8b6a-96d96b23094b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.458052] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b2e391-3bb9-48c6-b4ac-43eab04cebef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.466971] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Created folder: Project (3e3f3b0c58ed4bffba386aad34b5ae37) in parent group-v369553. [ 1520.467110] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Creating folder: Instances. Parent ref: group-v369728. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1520.467738] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65100830-0ae4-49a9-82c4-36f5353507a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.479535] env[62525]: DEBUG nova.virt.block_device [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updating existing volume attachment record: 4b9121b9-8ead-4df4-99a4-f58bf6397cad {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1520.548223] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Created folder: Instances in parent group-v369728. 
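The recurring "Waiting for the task: ... to complete" and "Task: {...} progress is N%" entries in this stretch of the log come from oslo.vmware's task-polling helpers (the wait_for_task frames at oslo_vmware/api.py:397 and the _poll_task frames at oslo_vmware/api.py:434 in the traces above). The snippet below is a minimal sketch of that calling pattern, not Nova's own implementation: it assumes an already-established oslo_vmware.api.VMwareAPISession passed in as session and a hypothetical vm_ref managed-object reference, and power_on_and_wait is an illustrative name rather than a Nova or oslo.vmware function.

def power_on_and_wait(session, vm_ref):
    """Sketch only: start a vSphere task and block until it finishes."""
    # PowerOnVM_Task returns a Task managed-object reference immediately; the
    # work itself continues asynchronously on the vCenter side.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task periodically (the "progress is N%" DEBUG
    # lines above) and returns the task info once vCenter reports success; a
    # task that ends in an error state surfaces as an oslo.vmware exception.
    return session.wait_for_task(task)

The same invoke-then-poll pattern accounts for every task seen in this section (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, CloneVM_Task, CreateSnapshot_Task, CreateVM_Task, SearchDatastore_Task, DeleteDatastoreFile_Task): the driver invokes the vSphere method, receives a Task reference, and blocks in wait_for_task() until the task reaches a terminal state.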
[ 1520.548664] env[62525]: DEBUG oslo.service.loopingcall [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1520.555602] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1520.555602] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0342ebb-ffdb-4219-b22f-446521b3763e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.581659] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1520.581659] env[62525]: value = "task-1781387" [ 1520.581659] env[62525]: _type = "Task" [ 1520.581659] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.591536] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781387, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.649507] env[62525]: DEBUG nova.compute.manager [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1520.649664] env[62525]: DEBUG nova.compute.manager [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing instance network info cache due to event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1520.649908] env[62525]: DEBUG oslo_concurrency.lockutils [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] Acquiring lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.651116] env[62525]: DEBUG oslo_concurrency.lockutils [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] Acquired lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.651116] env[62525]: DEBUG nova.network.neutron [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1520.751936] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d378e7d-6a0a-44c4-baa2-afeeed7807a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.760285] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3037c59c-b263-47d3-9d37-c6b0727669cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.794083] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e540aa3c-2090-4284-bc1c-ac53e2334e09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.802677] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7c9d65-6ae0-42fc-8fc1-d4d5c2919bcb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.819405] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781384, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.828086] env[62525]: DEBUG nova.compute.provider_tree [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1520.914684] env[62525]: DEBUG oslo_concurrency.lockutils [None req-00a38d85-c2dc-4d52-b70e-babe4f7efd15 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "dfa4b57e-6219-42eb-b257-263124f9a980" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.370s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.093769] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781387, 'name': CreateVM_Task, 'duration_secs': 0.437162} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.094065] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1521.094802] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.095072] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.095746] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1521.096096] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba7a6eb0-dfd0-4853-b1c7-6c7e769c908a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.101343] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1521.101343] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52559666-8643-4198-be52-293f4bfe1e4d" [ 1521.101343] env[62525]: _type = "Task" [ 1521.101343] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.109805] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52559666-8643-4198-be52-293f4bfe1e4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.180240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "dfa4b57e-6219-42eb-b257-263124f9a980" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.180505] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "dfa4b57e-6219-42eb-b257-263124f9a980" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.180783] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "dfa4b57e-6219-42eb-b257-263124f9a980-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.181345] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "dfa4b57e-6219-42eb-b257-263124f9a980-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.181530] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "dfa4b57e-6219-42eb-b257-263124f9a980-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.184579] env[62525]: INFO nova.compute.manager [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Terminating instance [ 1521.188898] env[62525]: DEBUG nova.compute.manager [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1521.189116] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1521.189960] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54f91f1-84de-4ca1-a7c2-a54b5aa4db5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.198209] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1521.198462] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-feadffea-5ed9-4092-b5b0-27a1487f68e9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.206626] env[62525]: DEBUG oslo_vmware.api [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1521.206626] env[62525]: value = "task-1781388" [ 1521.206626] env[62525]: _type = "Task" [ 1521.206626] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.216060] env[62525]: DEBUG oslo_vmware.api [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781388, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.320380] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781384, 'name': CloneVM_Task, 'duration_secs': 1.410551} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.320767] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Created linked-clone VM from snapshot [ 1521.324034] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bb17ee-5866-4168-8cb6-495625ae4536 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.329512] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Uploading image 3655d18e-a52b-4727-bd66-daf788aa6937 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1521.333651] env[62525]: DEBUG nova.scheduler.client.report [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1521.358152] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1521.358152] env[62525]: value = "vm-369727" [ 1521.358152] env[62525]: _type = "VirtualMachine" [ 1521.358152] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1521.358152] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2d2d8dda-fd08-4ad3-9f4f-982184cdb71b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.366850] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lease: (returnval){ [ 1521.366850] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52316e46-8605-9528-7017-27a96b10f0ec" [ 1521.366850] env[62525]: _type = "HttpNfcLease" [ 1521.366850] env[62525]: } obtained for exporting VM: (result){ [ 1521.366850] env[62525]: value = "vm-369727" [ 1521.366850] env[62525]: _type = "VirtualMachine" [ 1521.366850] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1521.367553] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the lease: (returnval){ [ 1521.367553] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52316e46-8605-9528-7017-27a96b10f0ec" [ 1521.367553] env[62525]: _type = "HttpNfcLease" [ 1521.367553] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1521.376716] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1521.376716] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52316e46-8605-9528-7017-27a96b10f0ec" [ 1521.376716] env[62525]: _type = "HttpNfcLease" [ 1521.376716] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1521.474981] env[62525]: DEBUG nova.network.neutron [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Successfully updated port: cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1521.515923] env[62525]: DEBUG nova.network.neutron [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updated VIF entry in instance network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1521.516328] env[62525]: DEBUG nova.network.neutron [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.612068] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] 
Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52559666-8643-4198-be52-293f4bfe1e4d, 'name': SearchDatastore_Task, 'duration_secs': 0.010532} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.612396] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.612635] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1521.612869] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.613022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.613204] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1521.613480] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ec71d5b-b014-4012-a63d-4d0b5303d437 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.622244] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1521.622442] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1521.623206] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-706dfea4-c707-4950-aebe-0916b80553b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.628694] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1521.628694] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e40179-8239-41f2-fcd6-f554b10fa74b" [ 1521.628694] env[62525]: _type = "Task" [ 1521.628694] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.639034] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e40179-8239-41f2-fcd6-f554b10fa74b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.717915] env[62525]: DEBUG oslo_vmware.api [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781388, 'name': PowerOffVM_Task, 'duration_secs': 0.251814} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.718207] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1521.718377] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1521.718650] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f46baf55-05ad-4dbb-837e-7fe6dbc86952 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.794224] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1521.794495] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1521.794738] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Deleting the datastore file [datastore1] dfa4b57e-6219-42eb-b257-263124f9a980 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1521.795206] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac0431ef-2540-4f0f-a519-1ce7fe01b9e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.802101] env[62525]: DEBUG oslo_vmware.api [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for the task: (returnval){ [ 1521.802101] env[62525]: value = "task-1781391" [ 1521.802101] env[62525]: _type = "Task" [ 1521.802101] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.811789] env[62525]: DEBUG oslo_vmware.api [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781391, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.847361] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.081s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.850028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.505s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.850328] env[62525]: DEBUG nova.objects.instance [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lazy-loading 'resources' on Instance uuid 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1521.874551] env[62525]: INFO nova.scheduler.client.report [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleted allocations for instance 70313696-a9cc-499c-b9e6-329a71c4b915 [ 1521.877480] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1521.877480] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52316e46-8605-9528-7017-27a96b10f0ec" [ 1521.877480] env[62525]: _type = "HttpNfcLease" [ 1521.877480] env[62525]: } is ready. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1521.880143] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1521.880143] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52316e46-8605-9528-7017-27a96b10f0ec" [ 1521.880143] env[62525]: _type = "HttpNfcLease" [ 1521.880143] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1521.881183] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b43f27-3588-4ab9-8242-c640ea7d67ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.888945] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d28635-82a8-9c5e-c5c7-2314963825ce/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1521.889625] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d28635-82a8-9c5e-c5c7-2314963825ce/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1521.978922] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Acquiring lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.980466] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Acquired lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.980466] env[62525]: DEBUG nova.network.neutron [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1522.014317] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2fb3d96b-6ff8-4577-b576-79dcbdea7f14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.018471] env[62525]: DEBUG oslo_concurrency.lockutils [req-c0194ac4-d7ea-4ca0-955f-4c03994eba13 req-063a1e2a-5c90-4d87-9687-6bcc24b50258 service nova] Releasing lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.140381] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e40179-8239-41f2-fcd6-f554b10fa74b, 'name': SearchDatastore_Task, 'duration_secs': 0.009284} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.141230] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dce83ca2-1c0a-4209-af99-9eba62ba8d86 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.147051] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1522.147051] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5228e495-27bb-0d6d-e24e-0cbb5fde919f" [ 1522.147051] env[62525]: _type = "Task" [ 1522.147051] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.155447] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5228e495-27bb-0d6d-e24e-0cbb5fde919f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.313159] env[62525]: DEBUG oslo_vmware.api [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Task: {'id': task-1781391, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147682} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.313442] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1522.313631] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1522.313811] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1522.314030] env[62525]: INFO nova.compute.manager [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1522.314286] env[62525]: DEBUG oslo.service.loopingcall [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1522.314475] env[62525]: DEBUG nova.compute.manager [-] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1522.314571] env[62525]: DEBUG nova.network.neutron [-] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1522.387525] env[62525]: DEBUG oslo_concurrency.lockutils [None req-74616c35-8abc-497d-8373-829ade8c557a tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "70313696-a9cc-499c-b9e6-329a71c4b915" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.796s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.539495] env[62525]: DEBUG nova.network.neutron [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1522.583960] env[62525]: DEBUG nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Start spawning the instance on the hypervisor. 
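[annotation] The "Waiting for function ... _deallocate_network_with_retries to return" line above comes from oslo.service's looping-call helper, which re-invokes a function until it signals completion. The sketch below shows that retry pattern with FixedIntervalLoopingCall; Nova may use a different looping-call variant for this path, and the stand-in function, attempt count, and interval are assumptions for illustration only.

    # Sketch of the oslo.service looping-call retry pattern referenced above.
    from oslo_service import loopingcall

    attempts = {"count": 0}

    def _deallocate_with_retries():
        attempts["count"] += 1
        if attempts["count"] < 3:       # pretend the first two tries fail
            return                       # returning lets the loop run again
        # Raising LoopingCallDone stops the loop and hands back a return value.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
    print("deallocated:", result, "after", attempts["count"], "attempts")
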
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1522.583960] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1522.583960] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1522.583960] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.584177] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1522.584177] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.584412] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1522.584504] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1522.584669] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1522.584832] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Got 1 
possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1522.585115] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1522.585202] env[62525]: DEBUG nova.virt.hardware [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1522.586140] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00447864-2470-42cd-b149-8842fd8fdd04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.600073] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d23000a-6b29-42e2-ab1c-5c2c9b955309 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.664281] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5228e495-27bb-0d6d-e24e-0cbb5fde919f, 'name': SearchDatastore_Task, 'duration_secs': 0.009343} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.667934] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.668373] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/24d38b8e-c48b-4562-817e-7ae57658fb1b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1522.669254] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6e476f1-910e-4d58-87d6-f21973766bd7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.678105] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1522.678105] env[62525]: value = "task-1781392" [ 1522.678105] env[62525]: _type = "Task" [ 1522.678105] env[62525]: } to complete. 
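[annotation] The CopyVirtualDisk_Task above, like the other vCenter operations in this log, returns a Task object that is polled until it finishes; each "progress is N%" DEBUG line corresponds to one poll. Below is a generic sketch of that poll-and-wait loop. get_task_info() is a hypothetical helper standing in for the vSphere TaskInfo lookup; the real code path goes through oslo.vmware's session wrapper.

    # Generic sketch of the task-polling loop behind "Waiting for the task ...",
    # "progress is N%" and "completed successfully" above (illustrative only).
    import time

    def wait_for_task(task, get_task_info, poll_interval=0.5):
        while True:
            info = get_task_info(task)      # dict with 'state', 'progress', 'result', 'error'
            if info["state"] == "success":
                return info.get("result")   # e.g. the copied vmdk descriptor
            if info["state"] == "error":
                raise RuntimeError(info["error"])
            # queued / running: report progress and poll again
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(poll_interval)
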
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.690935] env[62525]: DEBUG nova.compute.manager [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Received event network-vif-plugged-cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.691340] env[62525]: DEBUG oslo_concurrency.lockutils [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] Acquiring lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.691666] env[62525]: DEBUG oslo_concurrency.lockutils [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.691980] env[62525]: DEBUG oslo_concurrency.lockutils [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.692327] env[62525]: DEBUG nova.compute.manager [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] No waiting events found dispatching network-vif-plugged-cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1522.692749] env[62525]: WARNING nova.compute.manager [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Received unexpected event network-vif-plugged-cf50b3b9-451f-4000-8f23-dd36c9806f5f for instance with vm_state building and task_state spawning. [ 1522.692948] env[62525]: DEBUG nova.compute.manager [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Received event network-changed-cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.693123] env[62525]: DEBUG nova.compute.manager [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Refreshing instance network info cache due to event network-changed-cf50b3b9-451f-4000-8f23-dd36c9806f5f. 
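[annotation] The network-vif-plugged / network-changed events above trigger a refresh of the instance network info cache; the cache entries logged below are plain JSON-serializable structures. As a small sketch, the snippet extracts the MAC and fixed IPs from one such entry; the structure is abbreviated to the fields used here and the values are copied from the log, while the parsing helper itself is illustrative.

    # Sketch of reading one instance_info_cache VIF entry like those logged below.
    vif = {
        "id": "cf50b3b9-451f-4000-8f23-dd36c9806f5f",
        "address": "fa:16:3e:ae:a4:83",
        "network": {
            "bridge": "br-int",
            "subnets": [
                {"cidr": "192.168.128.0/28",
                 "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4}]}
            ],
        },
        "devname": "tapcf50b3b9-45",
    }

    def fixed_ips(vif):
        # flatten all fixed IPs across the VIF's subnets
        return [ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"]

    print(vif["address"], fixed_ips(vif))   # fa:16:3e:ae:a4:83 ['192.168.128.13']
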
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1522.693297] env[62525]: DEBUG oslo_concurrency.lockutils [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] Acquiring lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.697969] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781392, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.756870] env[62525]: DEBUG nova.network.neutron [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updating instance_info_cache with network_info: [{"id": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "address": "fa:16:3e:ae:a4:83", "network": {"id": "3d9bfbda-e322-4a47-bc2a-c0ed67569e3f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1396561609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c584fb3541c4ea49269ecfcf063d746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf50b3b9-45", "ovs_interfaceid": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.781270] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19606aee-94a8-4275-ad65-192dd8e7478b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.790660] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb265950-d6ce-47ff-b082-edc7be2e1180 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.824608] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953ed75a-5678-4423-bae1-ad91b1266f76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.834121] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c58b15-5f3d-450f-ac6b-863f1cf97314 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.852467] env[62525]: DEBUG nova.compute.provider_tree [None 
req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.188800] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781392, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476707} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.189232] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/24d38b8e-c48b-4562-817e-7ae57658fb1b.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1523.189641] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1523.190039] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a6bf38d-9181-4e78-b5dc-11cce8525c1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.195443] env[62525]: DEBUG nova.network.neutron [-] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.198177] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1523.198177] env[62525]: value = "task-1781393" [ 1523.198177] env[62525]: _type = "Task" [ 1523.198177] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.208030] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781393, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.259808] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Releasing lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.260203] env[62525]: DEBUG nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance network_info: |[{"id": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "address": "fa:16:3e:ae:a4:83", "network": {"id": "3d9bfbda-e322-4a47-bc2a-c0ed67569e3f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1396561609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c584fb3541c4ea49269ecfcf063d746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf50b3b9-45", "ovs_interfaceid": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1523.260528] env[62525]: DEBUG oslo_concurrency.lockutils [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] Acquired lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.260754] env[62525]: DEBUG nova.network.neutron [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Refreshing network info cache for port cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1523.262070] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:a4:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf50b3b9-451f-4000-8f23-dd36c9806f5f', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1523.270405] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 
tempest-ServerActionsV293TestJSON-811188686-project-member] Creating folder: Project (7c584fb3541c4ea49269ecfcf063d746). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.274483] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c38019e6-3923-4135-bfe3-fbe7b94c9760 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.287937] env[62525]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1523.287937] env[62525]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62525) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1523.288278] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Folder already exists: Project (7c584fb3541c4ea49269ecfcf063d746). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1523.288572] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Creating folder: Instances. Parent ref: group-v369698. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.288939] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f94e4131-4ce5-4baa-943c-9ac23ab62ca2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.300634] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Created folder: Instances in parent group-v369698. [ 1523.301129] env[62525]: DEBUG oslo.service.loopingcall [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1523.301696] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1523.301696] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f62a722-79f5-4bca-9f44-8a5bd041e0a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.326789] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1523.326789] env[62525]: value = "task-1781396" [ 1523.326789] env[62525]: _type = "Task" [ 1523.326789] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.335151] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781396, 'name': CreateVM_Task} progress is 0%. 
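[annotation] In the folder-creation entries above, the DuplicateName SOAP fault is not treated as an error: the code logs "Folder already exists" and carries on with the child folder. A minimal sketch of that tolerant-create pattern follows. It assumes oslo.vmware exposes the DuplicateName fault class (as Nova's vm_util does when catching it); create_fn and find_fn are hypothetical callables standing in for the Folder.CreateFolder invocation and the lookup of the existing folder.

    # Sketch of the DuplicateName-tolerant folder creation seen above (assumptions noted).
    from oslo_vmware import exceptions as vexc

    def ensure_folder(create_fn, find_fn, parent_ref, name):
        """Create a child folder, tolerating a previous or concurrent creation."""
        try:
            return create_fn(parent_ref, name)
        except vexc.DuplicateName:
            # The log above records this case as "Folder already exists" and reuses it.
            return find_fn(parent_ref, name)
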
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.356309] env[62525]: DEBUG nova.scheduler.client.report [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1523.526959] env[62525]: DEBUG nova.network.neutron [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updated VIF entry in instance network info cache for port cf50b3b9-451f-4000-8f23-dd36c9806f5f. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1523.527487] env[62525]: DEBUG nova.network.neutron [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updating instance_info_cache with network_info: [{"id": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "address": "fa:16:3e:ae:a4:83", "network": {"id": "3d9bfbda-e322-4a47-bc2a-c0ed67569e3f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1396561609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c584fb3541c4ea49269ecfcf063d746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf50b3b9-45", "ovs_interfaceid": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.699597] env[62525]: INFO nova.compute.manager [-] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Took 1.38 seconds to deallocate network for instance. [ 1523.714430] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080396} completed successfully. 
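[annotation] The inventory record above is what Placement uses to derive schedulable capacity, as (total - reserved) * allocation_ratio per resource class. The short sketch below reproduces that arithmetic with the values from the log; it is an interpretation of the reported data, not the scheduler's code.

    # Capacity arithmetic implied by the inventory record above:
    # usable capacity per resource class = (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
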
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.714775] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1523.715987] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5298508e-8dc1-45e0-945b-45795be65365 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.748159] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/24d38b8e-c48b-4562-817e-7ae57658fb1b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1523.749246] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9463a179-928a-49ed-ad23-c7d43fa4588c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.772403] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1523.772403] env[62525]: value = "task-1781397" [ 1523.772403] env[62525]: _type = "Task" [ 1523.772403] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.781264] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781397, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.836395] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781396, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.864540] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.867065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.798s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.887653] env[62525]: INFO nova.scheduler.client.report [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Deleted allocations for instance 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6 [ 1524.029808] env[62525]: DEBUG oslo_concurrency.lockutils [req-e6ef0119-7a27-42b0-926e-d3051d06a594 req-1ac80ef8-f74d-45db-a604-cb42a5cc24ea service nova] Releasing lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.210737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.283552] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.336655] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781396, 'name': CreateVM_Task, 'duration_secs': 0.539695} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.338868] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1524.338868] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369708', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'name': 'volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54d1a1ed-0880-4cca-8759-585dc65bdb1a', 'attached_at': '', 'detached_at': '', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'serial': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c'}, 'device_type': None, 'mount_device': '/dev/sda', 'attachment_id': '4b9121b9-8ead-4df4-99a4-f58bf6397cad', 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62525) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1524.338868] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Root volume attach. Driver type: vmdk {{(pid=62525) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1524.338868] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac87eda-2696-499d-9af0-36ced5ad0b81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.346820] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bdf4db-a947-4355-b2a7-2354ff2ee333 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.353106] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4c8577-3e2b-45ec-8ed2-e5e03f2016c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.359325] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ecc0488e-75b9-4c58-95f1-17577e84236a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.366823] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1524.366823] env[62525]: value = "task-1781398" [ 1524.366823] env[62525]: _type = "Task" [ 1524.366823] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.383262] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781398, 'name': RelocateVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.396200] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b32f9b31-ecb0-437b-9f4a-991cad4aabe2 tempest-ServerPasswordTestJSON-420300715 tempest-ServerPasswordTestJSON-420300715-project-member] Lock "1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.319s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.784798] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781397, 'name': ReconfigVM_Task, 'duration_secs': 0.582989} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.785172] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/24d38b8e-c48b-4562-817e-7ae57658fb1b.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1524.785905] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2b4b152-79da-44c1-bfde-8a0a12b7ee1a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.793335] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1524.793335] env[62525]: value = "task-1781399" [ 1524.793335] env[62525]: _type = "Task" [ 1524.793335] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.802666] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781399, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.885042] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781398, 'name': RelocateVM_Task, 'duration_secs': 0.384672} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.885611] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Volume attach. Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1524.885832] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369708', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'name': 'volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54d1a1ed-0880-4cca-8759-585dc65bdb1a', 'attached_at': '', 'detached_at': '', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'serial': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1524.886679] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653fe5bc-5e0c-479e-8e79-83d3dd708829 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.909370] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cf23e6-689e-45be-9637-765f78bb7c14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.913082] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 56cb0d0c-a7dd-4158-8bed-ddff050e0226 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913334] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f93669f2-c59d-4f3f-85a2-a60d714326ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913334] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913483] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913560] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6e9051e9-aa89-408f-8f62-533085dc1312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913627] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2f589dc1-9244-475f-86d0-4b69b511508b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913737] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e8864d73-35e6-490b-a07c-e8cac8baf880 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913849] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.913984] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 61f05e69-5e90-47da-9f47-3651b580a23c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1524.914119] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.914214] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 06716b84-3761-40b0-b76a-0c6ebf0d6aa7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.914321] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 7a92bac8-9cee-41ed-81e3-08b48432fe7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.914429] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 94560d78-071c-419d-ad10-f42a5b2271a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.914535] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0067de08-6708-4c7c-a83a-ed9df193d5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.914644] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cafae62e-b001-4ee0-8e89-4da9c60cf488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.914756] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.914878] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance dfa4b57e-6219-42eb-b257-263124f9a980 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1524.914987] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 24d38b8e-c48b-4562-817e-7ae57658fb1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.915110] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1524.936575] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c/volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1524.937302] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-999b5fb0-4454-43f1-b831-4f2ffbaa9dbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.958625] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1524.958625] env[62525]: value = "task-1781400" [ 1524.958625] env[62525]: _type = "Task" [ 1524.958625] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.969210] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781400, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.304135] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781399, 'name': Rename_Task, 'duration_secs': 0.198789} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.304418] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1525.304668] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88876446-ba3f-439b-80bc-d101024965f8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.311411] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1525.311411] env[62525]: value = "task-1781401" [ 1525.311411] env[62525]: _type = "Task" [ 1525.311411] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.319320] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781401, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.420456] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0a7ef997-bda5-452e-abe0-537146bf23f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1525.469302] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781400, 'name': ReconfigVM_Task, 'duration_secs': 0.44488} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.469581] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Reconfigured VM instance instance-0000003d to attach disk [datastore1] volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c/volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.474705] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb9f9d28-5ee5-4ec8-a752-df905af0a7c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.490896] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1525.490896] env[62525]: value = "task-1781402" [ 1525.490896] env[62525]: _type = "Task" [ 1525.490896] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.499811] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.822419] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781401, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.924639] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance bfb20735-1de9-4741-9d6f-5cd2ffedbca6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1526.001926] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781402, 'name': ReconfigVM_Task, 'duration_secs': 0.184678} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.002272] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369708', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'name': 'volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54d1a1ed-0880-4cca-8759-585dc65bdb1a', 'attached_at': '', 'detached_at': '', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'serial': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1526.002958] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-beb9f40d-54a7-4d33-99ae-9b4c44490ad6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.010008] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1526.010008] env[62525]: value = "task-1781403" [ 1526.010008] env[62525]: _type = "Task" [ 1526.010008] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.019564] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781403, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.323143] env[62525]: DEBUG oslo_vmware.api [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781401, 'name': PowerOnVM_Task, 'duration_secs': 0.69484} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.323435] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1526.323772] env[62525]: INFO nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Took 8.18 seconds to spawn the instance on the hypervisor. [ 1526.324038] env[62525]: DEBUG nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1526.324946] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d633ad-d93b-43c0-908a-370ed18d7762 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.428401] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1526.521727] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781403, 'name': Rename_Task, 'duration_secs': 0.202841} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.522019] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1526.522311] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6221b1e-1869-48f2-a2ab-c5358efe85f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.529696] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1526.529696] env[62525]: value = "task-1781404" [ 1526.529696] env[62525]: _type = "Task" [ 1526.529696] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.539191] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781404, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.845162] env[62525]: INFO nova.compute.manager [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Took 34.03 seconds to build instance. [ 1526.932599] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 1badb7d9-692a-445e-ad47-ebd6e19f8197 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1527.041278] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781404, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.348038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-25b70f62-c8b4-4511-8b74-efbfde5cd5c6 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "24d38b8e-c48b-4562-817e-7ae57658fb1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.544s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.437416] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6be49426-ddda-461e-908f-593c0904b129 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1527.543454] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781404, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.941063] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 81fbb354-21f2-43f0-8aa3-e80e10235326 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1527.941577] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1527.941577] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1528.045395] env[62525]: DEBUG oslo_vmware.api [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781404, 'name': PowerOnVM_Task, 'duration_secs': 1.393268} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.045669] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1528.045879] env[62525]: INFO nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Took 5.46 seconds to spawn the instance on the hypervisor. [ 1528.046076] env[62525]: DEBUG nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1528.046878] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423cd86f-93ef-4d56-8b08-c1a9c969ff56 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.110102] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204ffa8-377a-7a74-cf16-14104a99593d/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1528.110102] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3dd192a-e499-4c7a-98a7-695b2c9e0a10 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.120012] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204ffa8-377a-7a74-cf16-14104a99593d/disk-0.vmdk is in state: ready. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1528.120189] env[62525]: ERROR oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204ffa8-377a-7a74-cf16-14104a99593d/disk-0.vmdk due to incomplete transfer. [ 1528.120477] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-632e0da0-6ba0-4b2b-a183-4dd7dbc7a5a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.129561] env[62525]: DEBUG oslo_vmware.rw_handles [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204ffa8-377a-7a74-cf16-14104a99593d/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1528.130391] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Uploaded image f384cb86-ee9a-480a-89e0-d09d86894f5f to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1528.132267] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1528.132836] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-56f57291-6f5a-4e01-9b17-ecd64977cca6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.143535] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1528.143535] env[62525]: value = "task-1781405" [ 1528.143535] env[62525]: _type = "Task" [ 1528.143535] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.152823] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781405, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.285020] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c18c49-9d5b-45cd-a6df-d4fb1edf187d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.293152] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fef9ce-ec13-477e-a3f5-3747c7f2b0bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.327385] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70df4209-ebfd-4403-b4a4-5c6ab6efd03b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.337376] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7b3be6-b4aa-486b-b0b1-b6c93a462fe8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.355234] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.572904] env[62525]: INFO nova.compute.manager [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Took 30.29 seconds to build instance. [ 1528.656610] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781405, 'name': Destroy_Task} progress is 33%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.721578] env[62525]: DEBUG nova.compute.manager [req-c7ec1e1f-a554-4b7b-90d1-2a110488127a req-144a4d8f-4e70-44b5-8ac4-76fdfd0ecb7a service nova] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Received event network-vif-deleted-9ca7e6dd-06d1-4821-98e1-44ca72b410f2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.858247] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1529.072988] env[62525]: DEBUG oslo_concurrency.lockutils [None req-518f4353-e15d-4d57-8c0f-9a9adb022a02 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.800s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.124084] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "024c7393-de18-4c76-a27e-757710824494" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.124340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "024c7393-de18-4c76-a27e-757710824494" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.155651] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781405, 'name': Destroy_Task, 'duration_secs': 0.816966} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.156243] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Destroyed the VM [ 1529.157068] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1529.157068] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-792e75c6-f156-4460-85e7-555ebbc7623d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.165798] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1529.165798] env[62525]: value = "task-1781406" [ 1529.165798] env[62525]: _type = "Task" [ 1529.165798] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.177153] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781406, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.364739] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1529.365071] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.498s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.365482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.189s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.365583] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.371440] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.146s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.373099] env[62525]: INFO nova.compute.claims [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1529.417201] env[62525]: INFO nova.scheduler.client.report [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted allocations for instance 61f05e69-5e90-47da-9f47-3651b580a23c [ 1529.474237] env[62525]: INFO nova.compute.manager [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Rescuing [ 1529.474237] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.474237] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquired lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.474237] env[62525]: DEBUG nova.network.neutron [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.630291] env[62525]: DEBUG nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1529.682607] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781406, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.933157] env[62525]: DEBUG oslo_concurrency.lockutils [None req-96c22715-6fb5-4e3c-ac1b-54003583edf3 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "61f05e69-5e90-47da-9f47-3651b580a23c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.940s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.988527] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "808491cc-b195-4e81-afa5-86bd6ed8cb25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.988754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.138390] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.138747] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.139506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.139764] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.139966] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.147194] env[62525]: INFO nova.compute.manager [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Terminating instance [ 1530.156029] env[62525]: DEBUG nova.compute.manager [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1530.156348] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1530.157686] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86c8853-61ef-4ee8-90bc-fdb079f87a88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.164094] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.175723] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1530.176515] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9d337e7-10c0-4cee-b7ae-e2089248e0b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.187585] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781406, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.188787] env[62525]: DEBUG oslo_vmware.api [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1530.188787] env[62525]: value = "task-1781407" [ 1530.188787] env[62525]: _type = "Task" [ 1530.188787] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.198934] env[62525]: DEBUG oslo_vmware.api [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781407, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.269406] env[62525]: DEBUG nova.network.neutron [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.495722] env[62525]: DEBUG nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1530.681107] env[62525]: DEBUG oslo_vmware.api [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781406, 'name': RemoveSnapshot_Task, 'duration_secs': 1.101615} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.685231] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1530.685615] env[62525]: INFO nova.compute.manager [None req-93cdb03a-96f1-4cac-be3f-e29cb7241844 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Took 16.39 seconds to snapshot the instance on the hypervisor. 
[ 1530.705123] env[62525]: DEBUG oslo_vmware.api [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781407, 'name': PowerOffVM_Task, 'duration_secs': 0.302407} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.709251] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1530.709492] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1530.711514] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9bca37d5-5a02-40e8-9a27-10bed524ffce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.774034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Releasing lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.777335] env[62525]: DEBUG nova.compute.manager [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Received event network-changed-cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1530.777635] env[62525]: DEBUG nova.compute.manager [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Refreshing instance network info cache due to event network-changed-cf50b3b9-451f-4000-8f23-dd36c9806f5f. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1530.777869] env[62525]: DEBUG oslo_concurrency.lockutils [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] Acquiring lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.778024] env[62525]: DEBUG oslo_concurrency.lockutils [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] Acquired lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.778188] env[62525]: DEBUG nova.network.neutron [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Refreshing network info cache for port cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1530.797966] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1530.798226] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1530.798459] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleting the datastore file [datastore1] 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1530.802267] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f102ad34-b76e-4a4c-846a-a31a3812a56e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.815507] env[62525]: DEBUG oslo_vmware.api [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1530.815507] env[62525]: value = "task-1781409" [ 1530.815507] env[62525]: _type = "Task" [ 1530.815507] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.836590] env[62525]: DEBUG oslo_vmware.api [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781409, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.877029] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b201ffa-efad-46e5-b67b-d443bb6615fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.890312] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759c4ec8-d437-4498-b7a8-663ab559897b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.930426] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01eeb7ed-7eda-40d9-90be-9fd7f744671c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.940356] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf638ff-3c92-4181-80bc-e3b25f3ad910 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.956377] env[62525]: DEBUG nova.compute.provider_tree [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1531.022869] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.327279] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1531.327536] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee57bbb1-0398-4eec-8862-6d6f41a8635b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.334036] env[62525]: DEBUG oslo_vmware.api [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159927} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.334161] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1531.334787] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1531.334787] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1531.334787] env[62525]: INFO nova.compute.manager [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1531.334953] env[62525]: DEBUG oslo.service.loopingcall [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.336381] env[62525]: DEBUG nova.compute.manager [-] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1531.336476] env[62525]: DEBUG nova.network.neutron [-] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1531.338334] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1531.338334] env[62525]: value = "task-1781410" [ 1531.338334] env[62525]: _type = "Task" [ 1531.338334] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.348945] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781410, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.460263] env[62525]: DEBUG nova.scheduler.client.report [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1531.477932] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d28635-82a8-9c5e-c5c7-2314963825ce/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1531.479116] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0c7afc-432f-4f21-9d07-be03aec662e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.486688] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d28635-82a8-9c5e-c5c7-2314963825ce/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1531.486857] env[62525]: ERROR oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d28635-82a8-9c5e-c5c7-2314963825ce/disk-0.vmdk due to incomplete transfer. [ 1531.487119] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6825ba63-0403-4328-95e8-167453007f80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.496088] env[62525]: DEBUG oslo_vmware.rw_handles [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d28635-82a8-9c5e-c5c7-2314963825ce/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1531.496294] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Uploaded image 3655d18e-a52b-4727-bd66-daf788aa6937 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1531.497821] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1531.498098] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ac9588d3-63e2-41d4-9c19-333503b5b421 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.505961] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1531.505961] env[62525]: value = "task-1781411" [ 1531.505961] env[62525]: _type = "Task" [ 1531.505961] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.514980] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781411, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.652736] env[62525]: DEBUG nova.network.neutron [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updated VIF entry in instance network info cache for port cf50b3b9-451f-4000-8f23-dd36c9806f5f. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1531.653135] env[62525]: DEBUG nova.network.neutron [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updating instance_info_cache with network_info: [{"id": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "address": "fa:16:3e:ae:a4:83", "network": {"id": "3d9bfbda-e322-4a47-bc2a-c0ed67569e3f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1396561609-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c584fb3541c4ea49269ecfcf063d746", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf50b3b9-45", "ovs_interfaceid": "cf50b3b9-451f-4000-8f23-dd36c9806f5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.851655] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781410, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.925684] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "2f713b35-9d07-4d25-a333-506fd2469bd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.925928] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.969322] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.969833] env[62525]: DEBUG nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1531.972826] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.244s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.974652] env[62525]: INFO nova.compute.claims [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1532.017222] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781411, 'name': Destroy_Task} progress is 33%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.156576] env[62525]: DEBUG oslo_concurrency.lockutils [req-1d18ec39-adc6-44f4-aeda-a428dc6205ba req-df6c8428-7b55-4a03-a051-23c203365180 service nova] Releasing lock "refresh_cache-54d1a1ed-0880-4cca-8759-585dc65bdb1a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.214070] env[62525]: DEBUG nova.network.neutron [-] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.351813] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781410, 'name': PowerOffVM_Task, 'duration_secs': 0.545164} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.352107] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1532.352886] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e855e9-685b-4c4b-87db-457c44d8b4cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.372197] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d9e7ca-5b15-4fce-8e58-9aa59571e6b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.403703] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1532.404016] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-acd6f9bd-ca6e-4101-b36f-b8a4077bc7b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.412938] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1532.412938] env[62525]: value = "task-1781412" [ 1532.412938] env[62525]: _type = "Task" [ 1532.412938] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.422619] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781412, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.428908] env[62525]: DEBUG nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1532.478775] env[62525]: DEBUG nova.compute.utils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1532.482538] env[62525]: DEBUG nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1532.482538] env[62525]: DEBUG nova.network.neutron [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1532.518255] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781411, 'name': Destroy_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.530207] env[62525]: DEBUG nova.policy [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8211cc4b1244ffb9f1ecfb48d040f24', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d6ae425ee99473a8e75e1e00ee1c45a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1532.716703] env[62525]: INFO nova.compute.manager [-] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Took 1.38 seconds to deallocate network for instance. 
[ 1532.777240] env[62525]: DEBUG nova.network.neutron [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Successfully created port: 1a5c65b1-4069-489d-af82-975da5b3c645 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1532.800438] env[62525]: DEBUG nova.compute.manager [req-405e4eb9-76f8-4bb5-af3a-574c3770b10c req-9fd24cb1-49ad-47d8-a7f0-78a1bf8d40c2 service nova] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Received event network-vif-deleted-fd035d09-ef89-4a5c-ac55-b849f0b4cc48 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.924612] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1532.925630] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1532.925630] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.925630] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.925630] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1532.925630] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-303f839f-9913-42cd-96e9-a51fcb79dfc1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.935499] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1532.935663] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1532.938305] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22d8a440-1f79-48bc-af6d-275cf19ccff1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.944086] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1532.944086] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5253e332-25f7-d732-3483-64a06b02dce5" [ 1532.944086] env[62525]: _type = "Task" [ 1532.944086] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.955114] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5253e332-25f7-d732-3483-64a06b02dce5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.956789] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.983011] env[62525]: DEBUG nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1533.022227] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781411, 'name': Destroy_Task, 'duration_secs': 1.041945} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.022227] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Destroyed the VM [ 1533.022371] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1533.022587] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f6e53849-82d2-449c-b8cf-ac6a0370a862 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.033358] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1533.033358] env[62525]: value = "task-1781413" [ 1533.033358] env[62525]: _type = "Task" [ 1533.033358] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.042099] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781413, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.225494] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.310231] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ce3621-e3d6-4137-9a26-6befe3b00a43 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.320294] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b84ed7-708f-4cf0-82ef-9d058196e36a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.350423] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae7e330-a734-4609-b46f-4744ff4528bd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.357980] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1f4d0f-2e6e-44be-94e1-75be435c4d48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.371159] env[62525]: DEBUG nova.compute.provider_tree [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1533.456628] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5253e332-25f7-d732-3483-64a06b02dce5, 'name': SearchDatastore_Task, 'duration_secs': 0.009326} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.457709] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-949e9d57-78c6-499c-a078-7323aad058b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.463902] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1533.463902] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527d6651-6c79-43d9-8b31-6ce7f9cdee15" [ 1533.463902] env[62525]: _type = "Task" [ 1533.463902] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.472753] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527d6651-6c79-43d9-8b31-6ce7f9cdee15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.543772] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781413, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.874884] env[62525]: DEBUG nova.scheduler.client.report [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1533.976268] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527d6651-6c79-43d9-8b31-6ce7f9cdee15, 'name': SearchDatastore_Task, 'duration_secs': 0.011266} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.976533] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.976793] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. 
{{(pid=62525) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1533.977068] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6aecc32f-5276-4af0-840d-016455ec5dbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.984626] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1533.984626] env[62525]: value = "task-1781414" [ 1533.984626] env[62525]: _type = "Task" [ 1533.984626] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.992318] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.994446] env[62525]: DEBUG nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1534.021640] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1534.021879] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1534.022106] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1534.022361] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 
tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1534.022573] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1534.022772] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1534.023066] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1534.023287] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1534.023531] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1534.023762] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1534.024141] env[62525]: DEBUG nova.virt.hardware [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1534.025604] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3e400c-53b3-48bc-903b-05b7c97acea8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.034285] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e75324-8f8e-477c-928a-5349be1f176f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.054060] env[62525]: DEBUG oslo_vmware.api [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781413, 'name': RemoveSnapshot_Task, 
'duration_secs': 0.583836} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.054363] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1534.054595] env[62525]: INFO nova.compute.manager [None req-4c16dc31-f6e6-42a9-85a2-a27a9d7d71c2 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Took 16.85 seconds to snapshot the instance on the hypervisor. [ 1534.307756] env[62525]: DEBUG nova.network.neutron [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Successfully updated port: 1a5c65b1-4069-489d-af82-975da5b3c645 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1534.379783] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.380408] env[62525]: DEBUG nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1534.383463] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.234s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.384997] env[62525]: INFO nova.compute.claims [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1534.496379] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470723} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.496946] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. [ 1534.497762] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e083eef-64a5-47ba-ae24-4f0d0d0445b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.523123] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1534.523422] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c32d0913-967f-4b6e-93a8-ec88d3bc28c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.542539] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1534.542539] env[62525]: value = "task-1781415" [ 1534.542539] env[62525]: _type = "Task" [ 1534.542539] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.551114] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781415, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.813588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "refresh_cache-0a7ef997-bda5-452e-abe0-537146bf23f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.813588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquired lock "refresh_cache-0a7ef997-bda5-452e-abe0-537146bf23f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.813588] env[62525]: DEBUG nova.network.neutron [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1534.830699] env[62525]: DEBUG nova.compute.manager [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Received event network-vif-plugged-1a5c65b1-4069-489d-af82-975da5b3c645 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1534.830807] env[62525]: DEBUG oslo_concurrency.lockutils [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] Acquiring lock "0a7ef997-bda5-452e-abe0-537146bf23f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.831404] env[62525]: DEBUG oslo_concurrency.lockutils [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.831404] env[62525]: DEBUG oslo_concurrency.lockutils [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.831520] env[62525]: DEBUG nova.compute.manager [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] No waiting events found dispatching network-vif-plugged-1a5c65b1-4069-489d-af82-975da5b3c645 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1534.831661] env[62525]: WARNING nova.compute.manager [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Received unexpected event network-vif-plugged-1a5c65b1-4069-489d-af82-975da5b3c645 for instance with vm_state 
building and task_state spawning. [ 1534.831823] env[62525]: DEBUG nova.compute.manager [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Received event network-changed-1a5c65b1-4069-489d-af82-975da5b3c645 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1534.831993] env[62525]: DEBUG nova.compute.manager [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Refreshing instance network info cache due to event network-changed-1a5c65b1-4069-489d-af82-975da5b3c645. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1534.832212] env[62525]: DEBUG oslo_concurrency.lockutils [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] Acquiring lock "refresh_cache-0a7ef997-bda5-452e-abe0-537146bf23f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.889617] env[62525]: DEBUG nova.compute.utils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1534.893034] env[62525]: DEBUG nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1534.893133] env[62525]: DEBUG nova.network.neutron [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1534.933503] env[62525]: DEBUG nova.policy [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '187baa39499e4864a59299a00652e277', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad03ae99c1e941128e5e80b490f75f05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1535.054745] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781415, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.188132] env[62525]: DEBUG nova.network.neutron [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Successfully created port: 10079ce4-4ed3-4dc5-9fbc-8b200980365b {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1535.360364] env[62525]: DEBUG nova.network.neutron [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1535.393539] env[62525]: DEBUG nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1535.550261] env[62525]: DEBUG nova.network.neutron [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Updating instance_info_cache with network_info: [{"id": "1a5c65b1-4069-489d-af82-975da5b3c645", "address": "fa:16:3e:15:eb:78", "network": {"id": "0c5ac85f-ec16-40f9-8c6a-81318a30b591", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1038735108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d6ae425ee99473a8e75e1e00ee1c45a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a5c65b1-40", "ovs_interfaceid": "1a5c65b1-4069-489d-af82-975da5b3c645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.555676] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781415, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.746027] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0e4fe0-df05-43ab-8623-a3f391b2f08d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.754064] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01180509-b476-4473-a5b2-3f18f5bc1edd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.784541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d56f356-8d94-4074-9ed6-8bb1324caf34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.793037] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1af72e1-483b-4f05-b784-a947c4e373d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.810340] env[62525]: DEBUG nova.compute.provider_tree [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.055696] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781415, 'name': ReconfigVM_Task, 'duration_secs': 1.080358} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.056162] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Releasing lock "refresh_cache-0a7ef997-bda5-452e-abe0-537146bf23f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.056763] env[62525]: DEBUG nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Instance network_info: |[{"id": "1a5c65b1-4069-489d-af82-975da5b3c645", "address": "fa:16:3e:15:eb:78", "network": {"id": "0c5ac85f-ec16-40f9-8c6a-81318a30b591", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1038735108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d6ae425ee99473a8e75e1e00ee1c45a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a5c65b1-40", "ovs_interfaceid": "1a5c65b1-4069-489d-af82-975da5b3c645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1536.056763] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1536.057122] env[62525]: DEBUG oslo_concurrency.lockutils [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] Acquired lock "refresh_cache-0a7ef997-bda5-452e-abe0-537146bf23f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.057353] env[62525]: DEBUG nova.network.neutron [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Refreshing network info cache for port 1a5c65b1-4069-489d-af82-975da5b3c645 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1536.058446] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 
0a7ef997-bda5-452e-abe0-537146bf23f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:eb:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a5c65b1-4069-489d-af82-975da5b3c645', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1536.066018] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Creating folder: Project (7d6ae425ee99473a8e75e1e00ee1c45a). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1536.066796] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5ca06a-5c0c-4ec8-9379-bfd76931bb2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.072182] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a3400d5-5903-473f-93ea-175a949d93e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.099903] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-051655f7-3de1-47cd-a32d-fe9e82fac35c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.110089] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Created folder: Project (7d6ae425ee99473a8e75e1e00ee1c45a) in parent group-v369553. [ 1536.110298] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Creating folder: Instances. Parent ref: group-v369733. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1536.112986] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6fcb85f-152f-4d69-b3ca-1cf5c0f9dcb7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.120766] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1536.120766] env[62525]: value = "task-1781418" [ 1536.120766] env[62525]: _type = "Task" [ 1536.120766] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.125884] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Created folder: Instances in parent group-v369733. 
[ 1536.126500] env[62525]: DEBUG oslo.service.loopingcall [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1536.126889] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1536.127059] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c16ab6fb-ef0e-484f-892a-f5d22ade0eff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.146246] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781418, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.152661] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1536.152661] env[62525]: value = "task-1781419" [ 1536.152661] env[62525]: _type = "Task" [ 1536.152661] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.165971] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781419, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.313632] env[62525]: DEBUG nova.scheduler.client.report [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.348449] env[62525]: DEBUG nova.network.neutron [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Updated VIF entry in instance network info cache for port 1a5c65b1-4069-489d-af82-975da5b3c645. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1536.348817] env[62525]: DEBUG nova.network.neutron [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Updating instance_info_cache with network_info: [{"id": "1a5c65b1-4069-489d-af82-975da5b3c645", "address": "fa:16:3e:15:eb:78", "network": {"id": "0c5ac85f-ec16-40f9-8c6a-81318a30b591", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1038735108-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d6ae425ee99473a8e75e1e00ee1c45a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a5c65b1-40", "ovs_interfaceid": "1a5c65b1-4069-489d-af82-975da5b3c645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.409353] env[62525]: DEBUG nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1536.433325] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1536.433572] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1536.433728] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1536.433937] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1536.434060] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1536.434208] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1536.434414] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1536.434571] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1536.434735] env[62525]: DEBUG 
nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1536.434904] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1536.435090] env[62525]: DEBUG nova.virt.hardware [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1536.436296] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e134cce-aed7-4e46-b27c-0296d62e31b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.444793] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2afbc1-ff2f-41e3-803a-845ce7ddc87a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.334978] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.335094] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.335294] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.335503] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.335680] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d 
tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.337586] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.954s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.338058] env[62525]: DEBUG nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1537.341449] env[62525]: DEBUG oslo_concurrency.lockutils [req-4f331f49-1525-4450-8ed4-c4bafe6113b6 req-cb389a53-62cf-4390-bf8b-606df0309f60 service nova] Releasing lock "refresh_cache-0a7ef997-bda5-452e-abe0-537146bf23f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.342070] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781418, 'name': ReconfigVM_Task, 'duration_secs': 0.186701} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.342895] env[62525]: DEBUG nova.network.neutron [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Successfully updated port: 10079ce4-4ed3-4dc5-9fbc-8b200980365b {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1537.343944] env[62525]: INFO nova.compute.manager [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Terminating instance [ 1537.348398] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.863s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.349824] env[62525]: INFO nova.compute.claims [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1537.356044] env[62525]: DEBUG nova.compute.manager [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Received event network-vif-plugged-10079ce4-4ed3-4dc5-9fbc-8b200980365b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.356044] env[62525]: DEBUG oslo_concurrency.lockutils [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] Acquiring lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.356044] env[62525]: DEBUG oslo_concurrency.lockutils [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.356044] env[62525]: DEBUG oslo_concurrency.lockutils [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.356044] env[62525]: DEBUG nova.compute.manager [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] No waiting events found dispatching network-vif-plugged-10079ce4-4ed3-4dc5-9fbc-8b200980365b {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1537.356044] env[62525]: WARNING 
nova.compute.manager [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Received unexpected event network-vif-plugged-10079ce4-4ed3-4dc5-9fbc-8b200980365b for instance with vm_state building and task_state spawning. [ 1537.356044] env[62525]: DEBUG nova.compute.manager [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Received event network-changed-10079ce4-4ed3-4dc5-9fbc-8b200980365b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.356044] env[62525]: DEBUG nova.compute.manager [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Refreshing instance network info cache due to event network-changed-10079ce4-4ed3-4dc5-9fbc-8b200980365b. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1537.356044] env[62525]: DEBUG oslo_concurrency.lockutils [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] Acquiring lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.356044] env[62525]: DEBUG oslo_concurrency.lockutils [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] Acquired lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.356044] env[62525]: DEBUG nova.network.neutron [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Refreshing network info cache for port 10079ce4-4ed3-4dc5-9fbc-8b200980365b {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.356044] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1537.356797] env[62525]: DEBUG nova.compute.manager [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1537.356886] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1537.357574] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5882d460-2b81-40a5-af28-47c303ee43aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.359720] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffea4e2-8444-4e94-b553-cd4b12260804 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.372857] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1537.373099] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781419, 'name': CreateVM_Task, 'duration_secs': 0.360186} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.374475] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc30eb92-04db-486f-b24a-7c388f0a166a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.375970] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1537.376464] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1537.376464] env[62525]: value = "task-1781420" [ 1537.376464] env[62525]: _type = "Task" [ 1537.376464] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.377330] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.380024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.380024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1537.380024] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6e05527-4a9a-4ccd-8bed-e0586f98fb5a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.386319] env[62525]: DEBUG oslo_vmware.api [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1537.386319] env[62525]: value = "task-1781421" [ 1537.386319] env[62525]: _type = "Task" [ 1537.386319] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.398241] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781420, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.398241] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1537.398241] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529eeabb-c793-8299-81c3-93d67a13db4d" [ 1537.398241] env[62525]: _type = "Task" [ 1537.398241] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.402960] env[62525]: DEBUG oslo_vmware.api [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781421, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.409663] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529eeabb-c793-8299-81c3-93d67a13db4d, 'name': SearchDatastore_Task, 'duration_secs': 0.013592} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.411537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.411537] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1537.411537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.411537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.411537] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1537.411537] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee4b1e82-835f-4f3b-a8c6-2396179a44d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.421847] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1537.422052] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 
tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1537.422922] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-195fa0bf-5f8f-495a-abb0-f1cd3a30536d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.430451] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1537.430451] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521d4f40-3db2-89bf-0069-ec8e96f932d2" [ 1537.430451] env[62525]: _type = "Task" [ 1537.430451] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.439613] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521d4f40-3db2-89bf-0069-ec8e96f932d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.857937] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.861306] env[62525]: DEBUG nova.compute.utils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1537.862650] env[62525]: DEBUG nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Not allocating networking since 'none' was specified. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1537.890704] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781420, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.899011] env[62525]: DEBUG oslo_vmware.api [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781421, 'name': PowerOffVM_Task, 'duration_secs': 0.29541} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.899281] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1537.899449] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1537.899719] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-491ce1ec-d5a4-4094-a5cc-89c1e61c17ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.913174] env[62525]: DEBUG nova.network.neutron [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1537.942387] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521d4f40-3db2-89bf-0069-ec8e96f932d2, 'name': SearchDatastore_Task, 'duration_secs': 0.009497} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.943467] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb700e8e-940b-4927-93cc-dfff6b6ea3a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.948808] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1537.948808] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5237f82f-0fbc-0e5d-4a29-d7f90670736f" [ 1537.948808] env[62525]: _type = "Task" [ 1537.948808] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.959402] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5237f82f-0fbc-0e5d-4a29-d7f90670736f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.989046] env[62525]: DEBUG nova.network.neutron [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.053305] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1538.053611] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1538.053809] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Deleting the datastore file [datastore1] 06716b84-3761-40b0-b76a-0c6ebf0d6aa7 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1538.054087] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5eb273ad-860b-4e0d-a925-2bd2c52c4641 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.062834] env[62525]: DEBUG oslo_vmware.api [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1538.062834] env[62525]: value = "task-1781423" [ 1538.062834] env[62525]: _type = "Task" [ 1538.062834] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.071772] env[62525]: DEBUG oslo_vmware.api [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.368218] env[62525]: DEBUG nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1538.389292] env[62525]: DEBUG oslo_vmware.api [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781420, 'name': PowerOnVM_Task, 'duration_secs': 0.855558} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.391877] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.394711] env[62525]: DEBUG nova.compute.manager [None req-0348f045-d1af-4f12-881d-67160680f44f tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1538.395567] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603e696e-bfa1-4245-bbdd-b358e1537bda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.460475] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5237f82f-0fbc-0e5d-4a29-d7f90670736f, 'name': SearchDatastore_Task, 'duration_secs': 0.010759} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.462861] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.463144] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0a7ef997-bda5-452e-abe0-537146bf23f8/0a7ef997-bda5-452e-abe0-537146bf23f8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1538.463583] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-605c4d39-c319-4a0b-951d-300ec357ee8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.474763] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1538.474763] env[62525]: value = "task-1781424" [ 1538.474763] env[62525]: _type = "Task" [ 1538.474763] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.483007] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781424, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.492580] env[62525]: DEBUG oslo_concurrency.lockutils [req-849ef0db-708c-45dd-8419-314e5244d57d req-f1767df4-adf8-4795-b586-733533ea0154 service nova] Releasing lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.492580] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquired lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.492580] env[62525]: DEBUG nova.network.neutron [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1538.574570] env[62525]: DEBUG oslo_vmware.api [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129395} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.576486] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1538.576680] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1538.576863] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1538.577053] env[62525]: INFO nova.compute.manager [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Took 1.22 seconds to destroy the instance on the hypervisor. 
[ 1538.577317] env[62525]: DEBUG oslo.service.loopingcall [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1538.577700] env[62525]: DEBUG nova.compute.manager [-] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1538.577782] env[62525]: DEBUG nova.network.neutron [-] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1538.718945] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c04a61c-52e1-4857-9b42-db03d4045293 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.728220] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3891695f-40dd-4e41-ba2d-1fe87f546fcb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.770337] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdc7174-1434-4c89-bead-fa73ed9c467d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.780303] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4e9dc1-7748-455e-b9dc-7f470b719cb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.797355] env[62525]: DEBUG nova.compute.provider_tree [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.897302] env[62525]: DEBUG nova.compute.manager [req-2ea08a7c-e839-4cfe-a5e5-72167823e342 req-cbb6dc4c-5621-467e-87aa-119e8e805d04 service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Received event network-vif-deleted-b3e42a52-e289-4575-bfdd-06cf88ba69ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1538.897489] env[62525]: INFO nova.compute.manager [req-2ea08a7c-e839-4cfe-a5e5-72167823e342 req-cbb6dc4c-5621-467e-87aa-119e8e805d04 service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Neutron deleted interface b3e42a52-e289-4575-bfdd-06cf88ba69ce; detaching it from the instance and deleting it from the info cache [ 1538.897735] env[62525]: DEBUG nova.network.neutron [req-2ea08a7c-e839-4cfe-a5e5-72167823e342 req-cbb6dc4c-5621-467e-87aa-119e8e805d04 service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.985980] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc 
tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781424, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473671} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.986268] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0a7ef997-bda5-452e-abe0-537146bf23f8/0a7ef997-bda5-452e-abe0-537146bf23f8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1538.986494] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1538.986734] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70d8cb20-6448-46dc-8c11-42296d2ce2d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.996339] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1538.996339] env[62525]: value = "task-1781425" [ 1538.996339] env[62525]: _type = "Task" [ 1538.996339] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.004981] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781425, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.034489] env[62525]: DEBUG nova.network.neutron [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1539.172918] env[62525]: DEBUG nova.network.neutron [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Updating instance_info_cache with network_info: [{"id": "10079ce4-4ed3-4dc5-9fbc-8b200980365b", "address": "fa:16:3e:d0:43:38", "network": {"id": "4643b20c-7d8a-450b-8307-0c3d7da55d93", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1518794877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad03ae99c1e941128e5e80b490f75f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10079ce4-4e", "ovs_interfaceid": "10079ce4-4ed3-4dc5-9fbc-8b200980365b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.301221] env[62525]: DEBUG nova.scheduler.client.report [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1539.367992] env[62525]: DEBUG nova.network.neutron [-] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.377444] env[62525]: DEBUG nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1539.400306] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-087b58c3-3b98-40d8-8357-811a3b36c1c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.404211] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1539.404442] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1539.404596] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1539.404772] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1539.404914] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1539.405066] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1539.405274] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1539.405467] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 
tempest-ServerShowV257Test-1435304210-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1539.405669] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1539.405837] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1539.406034] env[62525]: DEBUG nova.virt.hardware [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1539.406836] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c65d4a-2039-4a0e-8573-a5be8e480c87 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.416755] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbec91c-12ba-4aa0-991f-ef3fc3eddacb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.424752] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442afb17-c106-4887-a0b2-fe5462917d80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.445261] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1539.450730] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Creating folder: Project (02bd39b436bb48a5930ca78f32742928). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.462528] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d85fc07-366a-44a4-bbfc-ef47614dabc3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.464258] env[62525]: DEBUG nova.compute.manager [req-2ea08a7c-e839-4cfe-a5e5-72167823e342 req-cbb6dc4c-5621-467e-87aa-119e8e805d04 service nova] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Detach interface failed, port_id=b3e42a52-e289-4575-bfdd-06cf88ba69ce, reason: Instance 06716b84-3761-40b0-b76a-0c6ebf0d6aa7 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1539.476015] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Created folder: Project (02bd39b436bb48a5930ca78f32742928) in parent group-v369553. [ 1539.476216] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Creating folder: Instances. Parent ref: group-v369736. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.476449] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a50141ae-0208-4dbb-8347-e074d92e0924 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.485327] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Created folder: Instances in parent group-v369736. [ 1539.485560] env[62525]: DEBUG oslo.service.loopingcall [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.485747] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1539.486067] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ced00c5-e6a1-4398-bfb5-05d315baec72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.505791] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781425, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06571} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.506967] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1539.507233] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1539.507233] env[62525]: value = "task-1781428" [ 1539.507233] env[62525]: _type = "Task" [ 1539.507233] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.507876] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84710ee-d84e-49ae-8914-6457b1bc6432 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.521024] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781428, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.538032] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 0a7ef997-bda5-452e-abe0-537146bf23f8/0a7ef997-bda5-452e-abe0-537146bf23f8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1539.540022] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-208b1bdb-dd3d-42ad-984b-6ae35660452a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.559213] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1539.559213] env[62525]: value = "task-1781429" [ 1539.559213] env[62525]: _type = "Task" [ 1539.559213] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.567924] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781429, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.675352] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Releasing lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.675706] env[62525]: DEBUG nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Instance network_info: |[{"id": "10079ce4-4ed3-4dc5-9fbc-8b200980365b", "address": "fa:16:3e:d0:43:38", "network": {"id": "4643b20c-7d8a-450b-8307-0c3d7da55d93", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1518794877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad03ae99c1e941128e5e80b490f75f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10079ce4-4e", "ovs_interfaceid": "10079ce4-4ed3-4dc5-9fbc-8b200980365b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1539.676148] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:43:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10079ce4-4ed3-4dc5-9fbc-8b200980365b', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1539.683966] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Creating folder: Project (ad03ae99c1e941128e5e80b490f75f05). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.684270] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a022829-83b3-43fb-96ed-3b0db7c48198 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.697145] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Created folder: Project (ad03ae99c1e941128e5e80b490f75f05) in parent group-v369553. [ 1539.697536] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Creating folder: Instances. Parent ref: group-v369739. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.697795] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc51b9c9-8025-4200-b903-8c6d7937eabb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.707630] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Created folder: Instances in parent group-v369739. [ 1539.707867] env[62525]: DEBUG oslo.service.loopingcall [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.708081] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1539.708295] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c67e52b9-f0fa-480a-b8e2-e4a002b5d000 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.729057] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1539.729057] env[62525]: value = "task-1781432" [ 1539.729057] env[62525]: _type = "Task" [ 1539.729057] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.737514] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781432, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.806671] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.807441] env[62525]: DEBUG nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1539.810379] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.698s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.812041] env[62525]: INFO nova.compute.claims [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1539.870866] env[62525]: INFO nova.compute.manager [-] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Took 1.29 seconds to deallocate network for instance. [ 1540.021843] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781428, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.072013] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781429, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.245740] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781432, 'name': CreateVM_Task, 'duration_secs': 0.394807} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.245979] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1540.246783] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.246856] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.247196] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1540.247416] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57affdb4-fb81-47b2-bca6-f1669e116934 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.254032] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1540.254032] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ce2d3a-b2e0-b421-9e0e-22be705f4936" [ 1540.254032] env[62525]: _type = "Task" [ 1540.254032] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.263018] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ce2d3a-b2e0-b421-9e0e-22be705f4936, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.289831] env[62525]: DEBUG nova.compute.manager [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1540.290031] env[62525]: DEBUG nova.compute.manager [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing instance network info cache due to event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1540.290305] env[62525]: DEBUG oslo_concurrency.lockutils [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] Acquiring lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.290488] env[62525]: DEBUG oslo_concurrency.lockutils [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] Acquired lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.290678] env[62525]: DEBUG nova.network.neutron [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1540.316974] env[62525]: DEBUG nova.compute.utils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1540.320294] env[62525]: DEBUG nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1540.320498] env[62525]: DEBUG nova.network.neutron [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1540.369657] env[62525]: DEBUG nova.policy [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '620bb90d56cf45f6ad33a7565e66bf7b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75f1c56190b647c98a738a5cbd6ebe81', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1540.377800] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.522031] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781428, 'name': CreateVM_Task, 'duration_secs': 0.588194} completed 
successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.522031] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1540.522656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.570011] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781429, 'name': ReconfigVM_Task, 'duration_secs': 0.637937} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.572024] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 0a7ef997-bda5-452e-abe0-537146bf23f8/0a7ef997-bda5-452e-abe0-537146bf23f8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1540.572024] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa888a31-a51d-4bc3-8df6-c0a1943ef3e3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.581417] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1540.581417] env[62525]: value = "task-1781433" [ 1540.581417] env[62525]: _type = "Task" [ 1540.581417] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.594717] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781433, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.625965] env[62525]: DEBUG nova.network.neutron [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Successfully created port: 8d49ec64-ed1a-4ad4-a592-32bd571e0800 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1540.764283] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ce2d3a-b2e0-b421-9e0e-22be705f4936, 'name': SearchDatastore_Task, 'duration_secs': 0.009992} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.764703] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.764988] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1540.765336] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.765558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.765825] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1540.766206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.766568] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef 
tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1540.766802] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1c88802-1f15-4983-b007-5c452f239185 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.768771] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c6693c-a794-4e62-bd3d-eb1b06e1239b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.774897] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1540.774897] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52efd154-063f-7af4-8c6d-699cb42641a2" [ 1540.774897] env[62525]: _type = "Task" [ 1540.774897] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.779671] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1540.779980] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1540.781205] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c242340e-8e86-490c-84aa-1b774b68a313 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.786914] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52efd154-063f-7af4-8c6d-699cb42641a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.790728] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1540.790728] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dda9b8-461e-3b9b-afdd-66dd6f7a9e28" [ 1540.790728] env[62525]: _type = "Task" [ 1540.790728] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.806687] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dda9b8-461e-3b9b-afdd-66dd6f7a9e28, 'name': SearchDatastore_Task, 'duration_secs': 0.010471} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.807221] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18b6bc86-bf3c-4091-86ed-6ab424ad4de0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.814015] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1540.814015] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520a492c-07f4-6e59-8c0e-024c50c68e52" [ 1540.814015] env[62525]: _type = "Task" [ 1540.814015] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.823676] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520a492c-07f4-6e59-8c0e-024c50c68e52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.827328] env[62525]: DEBUG nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1541.070555] env[62525]: DEBUG nova.network.neutron [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updated VIF entry in instance network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1541.070919] env[62525]: DEBUG nova.network.neutron [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.092769] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781433, 'name': Rename_Task, 'duration_secs': 0.302512} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.093380] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1541.093649] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-194cf521-ef85-4d9d-b57f-e7ce71c86358 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.101012] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1541.101012] env[62525]: value = "task-1781434" [ 1541.101012] env[62525]: _type = "Task" [ 1541.101012] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.109521] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781434, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.213481] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7aeaa76-c64c-4490-a556-4789bc0c1aac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.222035] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4e9f12-f87b-4f0f-a236-a8b0c457d9f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.260790] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32da999-a0b3-4385-a52f-b3226203ac38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.270255] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929c3998-006e-4aa6-bece-d7fe4ed77a61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.287571] env[62525]: DEBUG nova.compute.provider_tree [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.295356] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52efd154-063f-7af4-8c6d-699cb42641a2, 'name': SearchDatastore_Task, 'duration_secs': 0.012607} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.295706] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.295990] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1541.296236] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.325834] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520a492c-07f4-6e59-8c0e-024c50c68e52, 'name': SearchDatastore_Task, 'duration_secs': 0.01064} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.326149] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.326413] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] bfb20735-1de9-4741-9d6f-5cd2ffedbca6/bfb20735-1de9-4741-9d6f-5cd2ffedbca6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1541.326715] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.326930] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1541.327180] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b779158-6ddb-4854-8731-29afc3b10bdd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.329169] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19c86155-7649-49c0-b83f-7a6dda936b85 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.338760] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1541.338760] env[62525]: value = "task-1781435" [ 1541.338760] env[62525]: _type = "Task" [ 1541.338760] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.345202] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1541.345202] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1541.345202] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf9d42f8-5eed-46fe-90d7-5d4b0ff57073 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.350740] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781435, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.356079] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1541.356079] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e547de-da42-8f68-4afc-9c195925cace" [ 1541.356079] env[62525]: _type = "Task" [ 1541.356079] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.363515] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e547de-da42-8f68-4afc-9c195925cace, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.576093] env[62525]: DEBUG oslo_concurrency.lockutils [req-ec209134-5f02-47ec-8bb3-c893b8a54c30 req-4cc22221-9a4b-47bf-aaf4-5f11091ba2b3 service nova] Releasing lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.613850] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781434, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.791562] env[62525]: DEBUG nova.scheduler.client.report [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1541.841937] env[62525]: DEBUG nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1541.854569] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781435, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476573} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.854953] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] bfb20735-1de9-4741-9d6f-5cd2ffedbca6/bfb20735-1de9-4741-9d6f-5cd2ffedbca6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1541.855230] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1541.855572] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44b1ed8e-d94f-4220-a272-9ea3b3494aea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.867326] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e547de-da42-8f68-4afc-9c195925cace, 'name': SearchDatastore_Task, 'duration_secs': 0.008417} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.871626] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1541.871626] env[62525]: value = "task-1781436" [ 1541.871626] env[62525]: _type = "Task" [ 1541.871626] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.872277] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c69b7af-3222-4609-b24f-eaf03191dde4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.880955] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1541.881361] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1541.881596] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1541.881828] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1541.882015] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1541.882255] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1541.882544] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1541.882763] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1541.882971] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1541.883212] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1541.883480] env[62525]: DEBUG nova.virt.hardware [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1541.885146] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d58870-1e82-446e-8e0e-6886023c9559 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.890255] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1541.890255] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529ad5a5-193c-6683-3c3b-93ce2684377d" [ 1541.890255] env[62525]: _type = "Task" [ 1541.890255] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.900475] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781436, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.902222] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b045bd-2ad3-46d9-b787-ff1c4f726bfb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.909650] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529ad5a5-193c-6683-3c3b-93ce2684377d, 'name': SearchDatastore_Task, 'duration_secs': 0.009943} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.910374] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.910763] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1541.911110] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c11a372a-7578-43d9-86af-b3ae8be00349 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.927353] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1541.927353] env[62525]: value = "task-1781437" [ 1541.927353] env[62525]: _type = "Task" [ 1541.927353] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.936613] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.113986] env[62525]: DEBUG oslo_vmware.api [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781434, 'name': PowerOnVM_Task, 'duration_secs': 0.654845} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.114709] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1542.114961] env[62525]: INFO nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Took 8.12 seconds to spawn the instance on the hypervisor. 
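Annotation (not part of the trace): the entries above follow a fixed rhythm for every vCenter operation. A task is invoked (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task), the driver logs "Waiting for the task ... to complete", polls until the task finishes, then logs "completed successfully" with the measured duration_secs. The sketch below reproduces that poll loop in plain Python. It is an illustration of the pattern only, not the oslo.vmware implementation; the Task class is a stand-in stub so the example runs on its own.

# Illustration of the poll-until-done rhythm recorded above
# ("Waiting for the task ... to complete" -> "progress is N%" -> "completed successfully").
# Not the oslo.vmware implementation; Task is a hypothetical stub.
import time

class Task:
    # Stand-in for a vCenter task handle (hypothetical).
    def __init__(self, task_id, name, ticks=3):
        self.id = task_id
        self.name = name
        self._total = ticks
        self._left = ticks

    def poll(self):
        # A real client would query the vSphere API here instead of faking progress.
        self._left -= 1
        if self._left > 0:
            return "running", int(100 * (self._total - self._left) / self._total)
        return "success", 100

def wait_for_task(task, interval=0.5):
    # Block until the task finishes, logging progress like the entries above.
    print(f"Waiting for the task: {task.id} ({task.name}) to complete.")
    while True:
        state, progress = task.poll()
        if state == "success":
            print(f"Task: {task.id}, name: {task.name} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.id} failed")
        print(f"Task: {task.id}, name: {task.name} progress is {progress}%.")
        time.sleep(interval)

wait_for_task(Task("task-1781437", "CopyVirtualDisk_Task"))
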
[ 1542.115199] env[62525]: DEBUG nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1542.116069] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a926bc-c158-4784-950c-73bca564e39a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.253772] env[62525]: DEBUG nova.network.neutron [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Successfully updated port: 8d49ec64-ed1a-4ad4-a592-32bd571e0800 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1542.300519] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.300519] env[62525]: DEBUG nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1542.303025] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.274s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.306018] env[62525]: INFO nova.compute.claims [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1542.385425] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066037} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.385682] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1542.387013] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb39a77-355f-46f6-9000-7c60dab1f120 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.415121] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] bfb20735-1de9-4741-9d6f-5cd2ffedbca6/bfb20735-1de9-4741-9d6f-5cd2ffedbca6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1542.415964] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5dfce07b-47a8-4103-826a-39ee330d8975 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.439673] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781437, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446205} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.441284] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1542.441605] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1542.441938] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1542.441938] env[62525]: value = "task-1781438" [ 1542.441938] env[62525]: _type = "Task" [ 1542.441938] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.442136] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-698bc98c-88cd-4567-83fa-a6b486f8dae3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.453123] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781438, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.454698] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1542.454698] env[62525]: value = "task-1781439" [ 1542.454698] env[62525]: _type = "Task" [ 1542.454698] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.466140] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.550554] env[62525]: DEBUG nova.compute.manager [req-3bc4f877-deac-49da-8920-47d583e65df6 req-5e439f6f-cbfc-461b-8068-f84975ad4998 service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Received event network-vif-plugged-8d49ec64-ed1a-4ad4-a592-32bd571e0800 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1542.550802] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bc4f877-deac-49da-8920-47d583e65df6 req-5e439f6f-cbfc-461b-8068-f84975ad4998 service nova] Acquiring lock "1badb7d9-692a-445e-ad47-ebd6e19f8197-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.551048] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bc4f877-deac-49da-8920-47d583e65df6 req-5e439f6f-cbfc-461b-8068-f84975ad4998 service nova] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.551427] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bc4f877-deac-49da-8920-47d583e65df6 req-5e439f6f-cbfc-461b-8068-f84975ad4998 service nova] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.551490] env[62525]: DEBUG nova.compute.manager [req-3bc4f877-deac-49da-8920-47d583e65df6 req-5e439f6f-cbfc-461b-8068-f84975ad4998 service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] No waiting events found dispatching network-vif-plugged-8d49ec64-ed1a-4ad4-a592-32bd571e0800 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1542.551660] env[62525]: WARNING 
nova.compute.manager [req-3bc4f877-deac-49da-8920-47d583e65df6 req-5e439f6f-cbfc-461b-8068-f84975ad4998 service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Received unexpected event network-vif-plugged-8d49ec64-ed1a-4ad4-a592-32bd571e0800 for instance with vm_state building and task_state spawning. [ 1542.640063] env[62525]: INFO nova.compute.manager [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Took 38.45 seconds to build instance. [ 1542.673535] env[62525]: DEBUG nova.compute.manager [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1542.673793] env[62525]: DEBUG nova.compute.manager [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing instance network info cache due to event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1542.673964] env[62525]: DEBUG oslo_concurrency.lockutils [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] Acquiring lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.674488] env[62525]: DEBUG oslo_concurrency.lockutils [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] Acquired lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.674683] env[62525]: DEBUG nova.network.neutron [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1542.759988] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "refresh_cache-1badb7d9-692a-445e-ad47-ebd6e19f8197" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.759988] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquired lock "refresh_cache-1badb7d9-692a-445e-ad47-ebd6e19f8197" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.760134] env[62525]: DEBUG nova.network.neutron [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Building network info cache for instance {{(pid=62525) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1542.812058] env[62525]: DEBUG nova.compute.utils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1542.813578] env[62525]: DEBUG nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1542.813825] env[62525]: DEBUG nova.network.neutron [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1542.857811] env[62525]: DEBUG nova.policy [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1542.964480] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781438, 'name': ReconfigVM_Task, 'duration_secs': 0.298577} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.964947] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Reconfigured VM instance instance-0000003f to attach disk [datastore1] bfb20735-1de9-4741-9d6f-5cd2ffedbca6/bfb20735-1de9-4741-9d6f-5cd2ffedbca6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1542.965806] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cc9d5e4-31f2-4435-ace9-e5f28c86249d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.971397] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067524} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.972025] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1542.972839] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3bed95-ca06-421f-a006-7544b8cd51eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.978566] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1542.978566] env[62525]: value = "task-1781440" [ 1542.978566] env[62525]: _type = "Task" [ 1542.978566] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.994997] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1542.997655] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b456d1f-ef4d-4b8c-bb8d-21286d307eca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.018613] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781440, 'name': Rename_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.020100] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1543.020100] env[62525]: value = "task-1781441" [ 1543.020100] env[62525]: _type = "Task" [ 1543.020100] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.029137] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781441, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.137342] env[62525]: DEBUG nova.network.neutron [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Successfully created port: 7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1543.142072] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5b89085a-5374-4b74-bb50-1e875da464fc tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.965s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.317152] env[62525]: DEBUG nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1543.320276] env[62525]: DEBUG nova.network.neutron [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1543.498007] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781440, 'name': Rename_Task, 'duration_secs': 0.287397} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.500664] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1543.501118] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5cb18dc-77f1-467f-9091-1e989bf57a26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.509659] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1543.509659] env[62525]: value = "task-1781442" [ 1543.509659] env[62525]: _type = "Task" [ 1543.509659] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.522567] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.534311] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781441, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.716779] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a89a1e-737e-4028-878f-10931c6e2e74 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.727761] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c692a65-5678-4cf5-96ce-0d42b63aee81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.731855] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "0a7ef997-bda5-452e-abe0-537146bf23f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.732026] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.732244] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "0a7ef997-bda5-452e-abe0-537146bf23f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.732434] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.732602] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.735148] env[62525]: INFO nova.compute.manager [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf 
tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Terminating instance [ 1543.736487] env[62525]: DEBUG nova.compute.manager [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1543.736735] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1543.737482] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73d5d66-7346-4afc-a6a6-1b79869abcfd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.746170] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1543.773129] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a79e45c4-451d-4535-81f7-d8355a37d10d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.783228] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aebcbf8-2170-42cc-9ba9-aa875f158610 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.790237] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6848b24-1430-48e8-8711-c32d67856671 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.797392] env[62525]: DEBUG oslo_vmware.api [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1543.797392] env[62525]: value = "task-1781443" [ 1543.797392] env[62525]: _type = "Task" [ 1543.797392] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.811283] env[62525]: DEBUG nova.compute.provider_tree [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.817976] env[62525]: DEBUG oslo_vmware.api [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.864676] env[62525]: DEBUG nova.network.neutron [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Updating instance_info_cache with network_info: [{"id": "8d49ec64-ed1a-4ad4-a592-32bd571e0800", "address": "fa:16:3e:10:84:d1", "network": {"id": "e3d8b884-6f1d-48dd-be21-5136fea5c805", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-243468066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75f1c56190b647c98a738a5cbd6ebe81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d49ec64-ed", "ovs_interfaceid": "8d49ec64-ed1a-4ad4-a592-32bd571e0800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.870104] env[62525]: DEBUG nova.network.neutron [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updated VIF entry in instance network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1543.870104] env[62525]: DEBUG nova.network.neutron [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.023838] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781442, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.035889] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781441, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.306149] env[62525]: DEBUG oslo_vmware.api [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781443, 'name': PowerOffVM_Task, 'duration_secs': 0.309787} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.306425] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1544.306599] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1544.306858] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ade6fd1c-26a5-4a16-9052-ee463c1782a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.313705] env[62525]: DEBUG nova.scheduler.client.report [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1544.329512] env[62525]: DEBUG nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1544.360269] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1544.360518] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1544.360721] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.360941] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1544.361119] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.361300] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1544.361552] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1544.361719] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1544.361886] 
env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1544.362077] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1544.362273] env[62525]: DEBUG nova.virt.hardware [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1544.363129] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d89fb01-0dcd-412c-85b7-cf41786c6c12 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.367059] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Releasing lock "refresh_cache-1badb7d9-692a-445e-ad47-ebd6e19f8197" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.367425] env[62525]: DEBUG nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Instance network_info: |[{"id": "8d49ec64-ed1a-4ad4-a592-32bd571e0800", "address": "fa:16:3e:10:84:d1", "network": {"id": "e3d8b884-6f1d-48dd-be21-5136fea5c805", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-243468066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75f1c56190b647c98a738a5cbd6ebe81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d49ec64-ed", "ovs_interfaceid": "8d49ec64-ed1a-4ad4-a592-32bd571e0800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1544.367853] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 
1badb7d9-692a-445e-ad47-ebd6e19f8197] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:84:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ace50835-5731-4c77-b6c0-3076d7b4aa21', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d49ec64-ed1a-4ad4-a592-32bd571e0800', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1544.376082] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Creating folder: Project (75f1c56190b647c98a738a5cbd6ebe81). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.379119] env[62525]: DEBUG oslo_concurrency.lockutils [req-87774d10-b58c-4f68-99ab-0a85f12484c5 req-a9aaa1c2-5496-4920-acc7-5ff1cad2a1b6 service nova] Releasing lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.379540] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ec9799a-94e2-42d5-a8fe-ebd0033e7773 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.383236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a39dd89-d40a-45d8-8ff1-2bd97b2c2ff3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.391252] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1544.391511] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1544.391705] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Deleting the datastore file [datastore1] 0a7ef997-bda5-452e-abe0-537146bf23f8 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1544.391952] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a793e80-eaae-4362-8110-93885cb0fac6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.404929] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Created folder: Project (75f1c56190b647c98a738a5cbd6ebe81) in parent group-v369553. 
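Annotation (not part of the trace): interleaved with the spawn of 1badb7d9-692a-445e-ad47-ebd6e19f8197, the entries above record the teardown of 0a7ef997-bda5-452e-abe0-537146bf23f8: PowerOffVM_Task, a synchronous VirtualMachine.UnregisterVM call, then DeleteDatastoreFile_Task on the instance directory. A minimal sketch of that ordering follows; the functions are print-only stand-ins, not Nova or oslo.vmware APIs, and only the sequence of vCenter calls is taken from the log.

# Schematic of the teardown order recorded above. Hypothetical stand-in functions.
def power_off_vm(vm):
    # PowerOffVM_Task in the trace
    print(f"Powered off the VM {vm}")

def unregister_vm(vm):
    # VirtualMachine.UnregisterVM (a plain call, no task to poll)
    print(f"Unregistered the VM {vm}")

def delete_datastore_dir(vm, datastore="datastore1"):
    # FileManager.DeleteDatastoreFile_Task in the trace
    print(f"Deleting the datastore file [{datastore}] {vm}")

def destroy_instance(vm):
    # Mirrors the destroy path above: stop, unregister, clean up files.
    power_off_vm(vm)
    unregister_vm(vm)
    delete_datastore_dir(vm)

destroy_instance("0a7ef997-bda5-452e-abe0-537146bf23f8")
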
[ 1544.405162] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Creating folder: Instances. Parent ref: group-v369742. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.405418] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65406bc6-554d-4d0d-b130-513a9ea22358 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.409702] env[62525]: DEBUG oslo_vmware.api [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for the task: (returnval){ [ 1544.409702] env[62525]: value = "task-1781446" [ 1544.409702] env[62525]: _type = "Task" [ 1544.409702] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.417841] env[62525]: DEBUG oslo_vmware.api [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.419119] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Created folder: Instances in parent group-v369742. [ 1544.419381] env[62525]: DEBUG oslo.service.loopingcall [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.419476] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1544.419680] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16f552fd-edbe-4d34-9ec5-59e42a3c3e1b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.440489] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1544.440489] env[62525]: value = "task-1781448" [ 1544.440489] env[62525]: _type = "Task" [ 1544.440489] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.449294] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781448, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.527997] env[62525]: DEBUG oslo_vmware.api [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781442, 'name': PowerOnVM_Task, 'duration_secs': 0.522277} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.531545] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1544.531545] env[62525]: INFO nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Took 8.12 seconds to spawn the instance on the hypervisor. [ 1544.531760] env[62525]: DEBUG nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1544.535020] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a7ffc0-e3c6-4374-acae-ef85ba4b49dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.544828] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781441, 'name': ReconfigVM_Task, 'duration_secs': 1.035135} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.548157] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Reconfigured VM instance instance-00000040 to attach disk [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1544.549370] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07c5aeab-2b77-43d7-bb82-98e76ac5317d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.558562] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1544.558562] env[62525]: value = "task-1781449" [ 1544.558562] env[62525]: _type = "Task" [ 1544.558562] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.570317] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781449, 'name': Rename_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.774011] env[62525]: DEBUG nova.compute.manager [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Received event network-changed-8d49ec64-ed1a-4ad4-a592-32bd571e0800 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.774297] env[62525]: DEBUG nova.compute.manager [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Refreshing instance network info cache due to event network-changed-8d49ec64-ed1a-4ad4-a592-32bd571e0800. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1544.774508] env[62525]: DEBUG oslo_concurrency.lockutils [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] Acquiring lock "refresh_cache-1badb7d9-692a-445e-ad47-ebd6e19f8197" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.774708] env[62525]: DEBUG oslo_concurrency.lockutils [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] Acquired lock "refresh_cache-1badb7d9-692a-445e-ad47-ebd6e19f8197" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.774848] env[62525]: DEBUG nova.network.neutron [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Refreshing network info cache for port 8d49ec64-ed1a-4ad4-a592-32bd571e0800 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1544.818048] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.515s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.818601] env[62525]: DEBUG nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1544.822545] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.612s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.822805] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.825272] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.662s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.827015] env[62525]: INFO nova.compute.claims [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1544.848729] env[62525]: DEBUG nova.compute.manager [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.849260] env[62525]: DEBUG nova.compute.manager [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing instance network info cache due to event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1544.849594] env[62525]: DEBUG oslo_concurrency.lockutils [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] Acquiring lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.849788] env[62525]: DEBUG oslo_concurrency.lockutils [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] Acquired lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.850026] env[62525]: DEBUG nova.network.neutron [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1544.857679] env[62525]: INFO nova.scheduler.client.report [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Deleted allocations for instance dfa4b57e-6219-42eb-b257-263124f9a980 [ 1544.921689] env[62525]: DEBUG oslo_vmware.api [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Task: {'id': task-1781446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249223} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.922237] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.922237] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1544.922378] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1544.922552] env[62525]: INFO nova.compute.manager [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Took 1.19 seconds to destroy the instance on the hypervisor. 
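Tasks such as DeleteDatastoreFile_Task, CreateVM_Task and PowerOnVM_Task above are asynchronous on the vCenter side; the "Waiting for the task ... to complete" and "progress is 0%" records come from oslo.vmware polling the task until it reaches a terminal state. A minimal sketch of that poll-until-done pattern using oslo.service's FixedIntervalLoopingCall; fetch_task_info is a hypothetical stand-in for the real PropertyCollector call, and this is not the oslo.vmware implementation itself:

from oslo_service import loopingcall


def wait_for_task(fetch_task_info, task_ref, interval=0.5):
    """Poll task_ref until it reaches a terminal state and return its result."""

    def _poll():
        info = fetch_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 30}
        if info['state'] == 'success':
            # Stop the loop and hand the result back to .wait() below.
            raise loopingcall.LoopingCallDone(info.get('result'))
        if info['state'] == 'error':
            raise RuntimeError('task %s failed' % task_ref)
        # Any other state: keep polling; progress would be logged here.

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()

The 'duration_secs' values reported when a task completes (for example 0.249223 for task-1781446 above) are the elapsed time between submitting the task and the poll that observed its success.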
[ 1544.922797] env[62525]: DEBUG oslo.service.loopingcall [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.922985] env[62525]: DEBUG nova.compute.manager [-] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1544.923095] env[62525]: DEBUG nova.network.neutron [-] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1544.951709] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781448, 'name': CreateVM_Task, 'duration_secs': 0.4808} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.951929] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1544.952741] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.952952] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.953333] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1544.953627] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a88febae-cd25-4cbc-b47d-17f881dcb6e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.960635] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1544.960635] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5252dca1-2d58-ab36-baef-bfb0c2c70843" [ 1544.960635] env[62525]: _type = "Task" [ 1544.960635] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.971733] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5252dca1-2d58-ab36-baef-bfb0c2c70843, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.055925] env[62525]: INFO nova.compute.manager [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Took 40.35 seconds to build instance. [ 1545.070871] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781449, 'name': Rename_Task, 'duration_secs': 0.204029} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.070871] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1545.070871] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77ba3ee1-9bd3-4812-971f-c140fda274f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.077120] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1545.077120] env[62525]: value = "task-1781450" [ 1545.077120] env[62525]: _type = "Task" [ 1545.077120] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.087074] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781450, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.332082] env[62525]: DEBUG nova.compute.utils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1545.337112] env[62525]: DEBUG nova.network.neutron [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Successfully updated port: 7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.337112] env[62525]: DEBUG nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1545.337112] env[62525]: DEBUG nova.network.neutron [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1545.367014] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15a073aa-f183-4172-8d2d-21ca85b253a5 tempest-ServerGroupTestJSON-1456758530 tempest-ServerGroupTestJSON-1456758530-project-member] Lock "dfa4b57e-6219-42eb-b257-263124f9a980" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.186s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.459829] env[62525]: DEBUG nova.policy [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98769d9ddf744118910ce61bcf47f145', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c87f1997d5c4739850790da5dd969fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1545.473681] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5252dca1-2d58-ab36-baef-bfb0c2c70843, 'name': SearchDatastore_Task, 'duration_secs': 0.025108} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.475822] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.476074] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1545.476315] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.476464] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.476639] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1545.476910] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3adcbaa-6c17-418b-9458-667de01679a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.486636] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1545.486933] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1545.487577] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46acaf6d-5fae-40e4-9a0b-24113a14e878 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.496421] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1545.496421] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e250cf-cbc6-0dd6-4ddf-957ba7581b47" [ 1545.496421] env[62525]: _type = "Task" [ 1545.496421] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.506995] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e250cf-cbc6-0dd6-4ddf-957ba7581b47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.550893] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "24d38b8e-c48b-4562-817e-7ae57658fb1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.550893] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "24d38b8e-c48b-4562-817e-7ae57658fb1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.550893] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "24d38b8e-c48b-4562-817e-7ae57658fb1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.550893] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "24d38b8e-c48b-4562-817e-7ae57658fb1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.550893] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock 
"24d38b8e-c48b-4562-817e-7ae57658fb1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.554794] env[62525]: INFO nova.compute.manager [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Terminating instance [ 1545.555854] env[62525]: DEBUG nova.compute.manager [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1545.556072] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1545.556932] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccd815e-d05b-4784-82b7-9741f75e8ecc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.560167] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d923e94-d156-42b5-a967-297e25480359 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.878s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.566619] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1545.569045] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-babc6217-a78e-4174-b5f8-0559d851c8c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.577485] env[62525]: DEBUG oslo_vmware.api [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1545.577485] env[62525]: value = "task-1781451" [ 1545.577485] env[62525]: _type = "Task" [ 1545.577485] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.589481] env[62525]: DEBUG oslo_vmware.api [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781451, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.592859] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.633031] env[62525]: DEBUG nova.network.neutron [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updated VIF entry in instance network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1545.633467] env[62525]: DEBUG nova.network.neutron [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.681584] env[62525]: DEBUG nova.network.neutron [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Updated VIF entry in instance network info cache for port 8d49ec64-ed1a-4ad4-a592-32bd571e0800. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1545.681584] env[62525]: DEBUG nova.network.neutron [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Updating instance_info_cache with network_info: [{"id": "8d49ec64-ed1a-4ad4-a592-32bd571e0800", "address": "fa:16:3e:10:84:d1", "network": {"id": "e3d8b884-6f1d-48dd-be21-5136fea5c805", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-243468066-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75f1c56190b647c98a738a5cbd6ebe81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d49ec64-ed", "ovs_interfaceid": "8d49ec64-ed1a-4ad4-a592-32bd571e0800", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.829229] env[62525]: DEBUG nova.network.neutron [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Successfully created port: 6b0336ac-59dc-4910-adf2-48b4b073fdb6 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1545.841479] env[62525]: DEBUG nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1545.841479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.841479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.841479] env[62525]: DEBUG nova.network.neutron [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1545.876994] env[62525]: DEBUG nova.network.neutron [-] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.008348] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e250cf-cbc6-0dd6-4ddf-957ba7581b47, 'name': SearchDatastore_Task, 'duration_secs': 0.031655} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.011779] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32c94a62-6358-4d9d-926d-0d30c22240c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.019866] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1546.019866] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524bf090-5a64-7536-5d5b-59b9539c5091" [ 1546.019866] env[62525]: _type = "Task" [ 1546.019866] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.028437] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524bf090-5a64-7536-5d5b-59b9539c5091, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.093861] env[62525]: DEBUG oslo_vmware.api [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781451, 'name': PowerOffVM_Task, 'duration_secs': 0.396317} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.096830] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1546.097045] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1546.097316] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.097586] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10bf5ddc-d5ff-454b-8160-c6b3c7fcb647 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.137115] env[62525]: DEBUG oslo_concurrency.lockutils [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] Releasing lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.137381] env[62525]: DEBUG nova.compute.manager [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.137643] env[62525]: DEBUG nova.compute.manager [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing instance network info cache due to event network-changed-0a6eef35-6265-42d0-b939-85c1984339a4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1546.137795] env[62525]: DEBUG oslo_concurrency.lockutils [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] Acquiring lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.137917] env[62525]: DEBUG oslo_concurrency.lockutils [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] Acquired lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.138076] env[62525]: DEBUG nova.network.neutron [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Refreshing network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.187643] env[62525]: DEBUG oslo_concurrency.lockutils [req-e2a8f377-6bd4-4701-b83e-2bb978d1a71a req-d328dbd4-6cec-42b8-a06e-3ee40eee43db service nova] Releasing lock "refresh_cache-1badb7d9-692a-445e-ad47-ebd6e19f8197" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.205970] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1546.206212] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1546.206402] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Deleting the datastore file [datastore1] 24d38b8e-c48b-4562-817e-7ae57658fb1b {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1546.206672] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-deac986b-22b4-4519-8f5b-26582e4b01ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.215777] env[62525]: DEBUG oslo_vmware.api [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for the task: (returnval){ [ 1546.215777] env[62525]: value = "task-1781453" [ 1546.215777] env[62525]: _type = "Task" [ 1546.215777] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.225057] env[62525]: DEBUG oslo_vmware.api [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.226715] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80789ca6-7808-41e3-a032-274e4a63b4ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.233649] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c37d1d-fa7a-4c21-8adc-a57a66f4d29f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.238566] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.238817] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.239055] env[62525]: INFO nova.compute.manager [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Rebooting instance [ 1546.268741] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42448296-cba6-42e7-a85f-739f7978c1d5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.277723] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f850fe-9ee1-486e-a053-fac95d2bc3a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.292851] env[62525]: DEBUG nova.compute.provider_tree [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1546.295277] env[62525]: DEBUG 
oslo_concurrency.lockutils [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.295445] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquired lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.295646] env[62525]: DEBUG nova.network.neutron [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1546.378794] env[62525]: DEBUG nova.network.neutron [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1546.381466] env[62525]: INFO nova.compute.manager [-] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Took 1.46 seconds to deallocate network for instance. [ 1546.517305] env[62525]: DEBUG nova.network.neutron [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.531450] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524bf090-5a64-7536-5d5b-59b9539c5091, 'name': 
SearchDatastore_Task, 'duration_secs': 0.013634} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.532313] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.532580] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1badb7d9-692a-445e-ad47-ebd6e19f8197/1badb7d9-692a-445e-ad47-ebd6e19f8197.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1546.532838] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77287d7e-0de6-4f6f-b432-d818a0dd6845 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.541385] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1546.541385] env[62525]: value = "task-1781454" [ 1546.541385] env[62525]: _type = "Task" [ 1546.541385] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.551160] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781454, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.590799] env[62525]: DEBUG oslo_vmware.api [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781450, 'name': PowerOnVM_Task, 'duration_secs': 1.082115} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.591140] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1546.591370] env[62525]: INFO nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Took 7.21 seconds to spawn the instance on the hypervisor. 
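The spawn path above copies the cached image disk to a per-instance location before powering the VM on: "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" is copied to "[datastore1] 1badb7d9-692a-445e-ad47-ebd6e19f8197/1badb7d9-692a-445e-ad47-ebd6e19f8197.vmdk" via CopyVirtualDisk_Task. A small helper sketch, hypothetical rather than Nova's ds_util, showing how those datastore path strings are composed:

def ds_path(datastore, *parts):
    # vSphere datastore paths take the form "[<datastore>] <relative/path>".
    return '[%s] %s' % (datastore, '/'.join(parts))


def cached_image_vmdk(datastore, image_id):
    # Image-cache location, as in the SearchDatastore_Task results above.
    return ds_path(datastore, 'devstack-image-cache_base', image_id,
                   '%s.vmdk' % image_id)


def instance_vmdk(datastore, instance_uuid):
    # Per-instance destination used by the copy and later attach_disk_to_vm.
    return ds_path(datastore, instance_uuid, '%s.vmdk' % instance_uuid)

For example, cached_image_vmdk('datastore1', 'a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36') reproduces the source path shown in the CopyVirtualDisk_Task record above.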
[ 1546.591554] env[62525]: DEBUG nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1546.592391] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9181bdb4-594a-487d-a915-1e3e1ffb9149 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.726374] env[62525]: DEBUG oslo_vmware.api [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Task: {'id': task-1781453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.496036} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.728806] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1546.731989] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1546.731989] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1546.731989] env[62525]: INFO nova.compute.manager [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1546.731989] env[62525]: DEBUG oslo.service.loopingcall [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1546.731989] env[62525]: DEBUG nova.compute.manager [-] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1546.731989] env[62525]: DEBUG nova.network.neutron [-] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1546.809942] env[62525]: DEBUG nova.compute.manager [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-vif-plugged-7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.810311] env[62525]: DEBUG oslo_concurrency.lockutils [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.810592] env[62525]: DEBUG oslo_concurrency.lockutils [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] Lock "6be49426-ddda-461e-908f-593c0904b129-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.810853] env[62525]: DEBUG oslo_concurrency.lockutils [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] Lock "6be49426-ddda-461e-908f-593c0904b129-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.811204] env[62525]: DEBUG nova.compute.manager [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] No waiting events found dispatching network-vif-plugged-7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1546.811519] env[62525]: WARNING nova.compute.manager [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received unexpected event network-vif-plugged-7a7fc668-0509-45b5-954b-ce58cc91d1e3 for instance with vm_state building and task_state spawning. [ 1546.811823] env[62525]: DEBUG nova.compute.manager [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-changed-7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.812949] env[62525]: DEBUG nova.compute.manager [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing instance network info cache due to event network-changed-7a7fc668-0509-45b5-954b-ce58cc91d1e3. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1546.812949] env[62525]: DEBUG oslo_concurrency.lockutils [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.827801] env[62525]: ERROR nova.scheduler.client.report [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [req-310f8a59-9588-44eb-b461-72fc247c87e7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-310f8a59-9588-44eb-b461-72fc247c87e7"}]} [ 1546.856348] env[62525]: DEBUG nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1546.867617] env[62525]: DEBUG nova.scheduler.client.report [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1546.889978] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1546.890340] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 1546.890631] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.890886] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1546.891100] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.891382] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1546.891668] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1546.891904] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1546.892327] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1546.892498] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1546.892731] env[62525]: DEBUG nova.virt.hardware [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1546.893820] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.894728] env[62525]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b792a5e-ffd1-46d9-8a6f-133e6dff814a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.899693] env[62525]: DEBUG nova.scheduler.client.report [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1546.899948] env[62525]: DEBUG nova.compute.provider_tree [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1546.909401] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e12c5e6-6295-4f6b-bc9e-68a8783ed8e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.914867] env[62525]: DEBUG nova.network.neutron [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updated VIF entry in instance network info cache for port 0a6eef35-6265-42d0-b939-85c1984339a4. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1546.915327] env[62525]: DEBUG nova.network.neutron [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [{"id": "0a6eef35-6265-42d0-b939-85c1984339a4", "address": "fa:16:3e:e3:e8:95", "network": {"id": "45ce7613-a551-4015-a845-12f847233cc9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-654312683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3e3f3b0c58ed4bffba386aad34b5ae37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a6eef35-62", "ovs_interfaceid": "0a6eef35-6265-42d0-b939-85c1984339a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.917352] env[62525]: DEBUG nova.scheduler.client.report [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1546.943012] env[62525]: DEBUG nova.scheduler.client.report [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1547.021760] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.022632] env[62525]: DEBUG nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Instance network_info: |[{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1547.025640] env[62525]: DEBUG oslo_concurrency.lockutils [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.025640] env[62525]: DEBUG nova.network.neutron [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing network info cache for port 7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1547.030048] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:03:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '669e4919-e0ad-4e23-9f23-4c5f2be0d858', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a7fc668-0509-45b5-954b-ce58cc91d1e3', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1547.035792] env[62525]: DEBUG oslo.service.loopingcall [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.036837] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6be49426-ddda-461e-908f-593c0904b129] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1547.037084] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6a6f756-3869-4964-9888-2b866e1002c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.066398] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1547.066398] env[62525]: value = "task-1781455" [ 1547.066398] env[62525]: _type = "Task" [ 1547.066398] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.072380] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781454, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.086588] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781455, 'name': CreateVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.109919] env[62525]: INFO nova.compute.manager [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Took 41.98 seconds to build instance. [ 1547.116684] env[62525]: DEBUG nova.network.neutron [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Updating instance_info_cache with network_info: [{"id": "10079ce4-4ed3-4dc5-9fbc-8b200980365b", "address": "fa:16:3e:d0:43:38", "network": {"id": "4643b20c-7d8a-450b-8307-0c3d7da55d93", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1518794877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad03ae99c1e941128e5e80b490f75f05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10079ce4-4e", "ovs_interfaceid": "10079ce4-4ed3-4dc5-9fbc-8b200980365b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.379038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18f3bdf-3a2f-4310-a150-9e7a7fb5b291 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.389154] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7c405e-34ba-4768-9ce5-93ab08569f21 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.430786] env[62525]: DEBUG oslo_concurrency.lockutils [req-a37530d9-7349-4204-b98e-483ba570b180 req-af153960-427c-4498-b775-b618feb4bd0d service nova] Releasing lock "refresh_cache-24d38b8e-c48b-4562-817e-7ae57658fb1b" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.432066] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-8c3f431b-1b13-4ec4-b53b-b929633ce764 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.443521] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa8e134-f576-4574-bb20-6656c302d861 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.451017] env[62525]: DEBUG nova.network.neutron [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Successfully updated port: 6b0336ac-59dc-4910-adf2-48b4b073fdb6 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1547.464560] env[62525]: DEBUG nova.compute.provider_tree [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1547.566575] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781454, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.641944} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.566861] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1badb7d9-692a-445e-ad47-ebd6e19f8197/1badb7d9-692a-445e-ad47-ebd6e19f8197.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1547.567174] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1547.567337] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ab6c8cc-091e-4162-9e89-56fc2f0eb771 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.577300] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1547.577300] env[62525]: value = "task-1781456" [ 1547.577300] env[62525]: _type = "Task" [ 1547.577300] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.584370] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781455, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.590792] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781456, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.612788] env[62525]: DEBUG oslo_concurrency.lockutils [None req-889baa76-4136-490c-a815-c1536ee54eef tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.507s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.620085] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Releasing lock "refresh_cache-bfb20735-1de9-4741-9d6f-5cd2ffedbca6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.622693] env[62525]: DEBUG nova.compute.manager [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1547.623585] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f864d877-6724-462d-a70d-9a5fa75ccbfc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.797649] env[62525]: DEBUG nova.compute.manager [req-d1861d0f-b07e-4d9e-ad65-ef204766becf req-682da9d5-93ee-411b-9e56-a34bdf71b473 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Received event network-vif-deleted-0a6eef35-6265-42d0-b939-85c1984339a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1547.797649] env[62525]: INFO nova.compute.manager [req-d1861d0f-b07e-4d9e-ad65-ef204766becf req-682da9d5-93ee-411b-9e56-a34bdf71b473 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Neutron deleted interface 0a6eef35-6265-42d0-b939-85c1984339a4; detaching it from the instance and deleting it from the info cache [ 1547.797649] env[62525]: DEBUG nova.network.neutron [req-d1861d0f-b07e-4d9e-ad65-ef204766becf req-682da9d5-93ee-411b-9e56-a34bdf71b473 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.907554] env[62525]: DEBUG nova.network.neutron [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updated VIF entry in instance network info cache for port 7a7fc668-0509-45b5-954b-ce58cc91d1e3. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.907964] env[62525]: DEBUG nova.network.neutron [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.953162] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-81fbb354-21f2-43f0-8aa3-e80e10235326" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.953341] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-81fbb354-21f2-43f0-8aa3-e80e10235326" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.955378] env[62525]: DEBUG nova.network.neutron [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1547.969174] env[62525]: DEBUG nova.scheduler.client.report [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1548.038344] env[62525]: DEBUG 
nova.network.neutron [-] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.085954] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781455, 'name': CreateVM_Task, 'duration_secs': 0.993874} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.086551] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6be49426-ddda-461e-908f-593c0904b129] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1548.087290] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.087537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.088011] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1548.088266] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1820900c-45e4-46f2-b90b-41771c918146 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.094929] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781456, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074932} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.095532] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1548.096298] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eb796c-dcc7-4ae8-89a9-eeb529bedf0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.100262] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1548.100262] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52aef52f-0dfa-6cc5-0ff5-0c75da84fb02" [ 1548.100262] env[62525]: _type = "Task" [ 1548.100262] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.120870] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 1badb7d9-692a-445e-ad47-ebd6e19f8197/1badb7d9-692a-445e-ad47-ebd6e19f8197.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1548.121921] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c680d46-7ee4-42b2-be33-6bb0d099e152 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.147892] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52aef52f-0dfa-6cc5-0ff5-0c75da84fb02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.154573] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1548.154573] env[62525]: value = "task-1781457" [ 1548.154573] env[62525]: _type = "Task" [ 1548.154573] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.163569] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781457, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.194691] env[62525]: INFO nova.compute.manager [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Rebuilding instance [ 1548.255660] env[62525]: DEBUG nova.compute.manager [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1548.256674] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56229405-8560-408c-b2e2-a6ad4305fc98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.301452] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f981d49-7e26-426f-bab3-dbdb02598154 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.312951] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208a4d65-5eb3-408d-ac9d-6b0ec03efda2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.352622] env[62525]: DEBUG nova.compute.manager [req-d1861d0f-b07e-4d9e-ad65-ef204766becf req-682da9d5-93ee-411b-9e56-a34bdf71b473 service nova] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Detach interface failed, port_id=0a6eef35-6265-42d0-b939-85c1984339a4, reason: Instance 24d38b8e-c48b-4562-817e-7ae57658fb1b could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1548.411989] env[62525]: DEBUG oslo_concurrency.lockutils [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.411989] env[62525]: DEBUG nova.compute.manager [req-143ac5f2-79fd-43e1-bbd1-a359804761d4 req-b82f923c-43d6-417d-b599-6d9f75fd85c6 service nova] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Received event network-vif-deleted-1a5c65b1-4069-489d-af82-975da5b3c645 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.474713] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.649s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.475286] env[62525]: DEBUG nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1548.478985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.456s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.480575] env[62525]: INFO nova.compute.claims [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1548.520970] env[62525]: DEBUG nova.network.neutron [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1548.544034] env[62525]: INFO nova.compute.manager [-] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Took 1.81 seconds to deallocate network for instance. [ 1548.611782] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52aef52f-0dfa-6cc5-0ff5-0c75da84fb02, 'name': SearchDatastore_Task, 'duration_secs': 0.045752} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.612100] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.612334] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1548.612626] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.612789] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.612881] env[62525]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1548.613146] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-620b643d-aba4-405d-a5f3-7a73f49e3e45 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.625133] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1548.625327] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1548.626058] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18a7da4a-bb33-45b8-9ec6-1918cbabfc43 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.632144] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1548.632144] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5292296a-1768-43ef-abc2-199ff9249d48" [ 1548.632144] env[62525]: _type = "Task" [ 1548.632144] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.640355] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5292296a-1768-43ef-abc2-199ff9249d48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.651158] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27af27d-ec41-410d-9cca-2a433ad08702 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.659553] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Doing hard reboot of VM {{(pid=62525) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1548.660120] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-6b58720d-55c4-4e5e-8695-bdd712ac928b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.665137] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781457, 'name': ReconfigVM_Task, 'duration_secs': 0.299972} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.667281] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 1badb7d9-692a-445e-ad47-ebd6e19f8197/1badb7d9-692a-445e-ad47-ebd6e19f8197.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1548.668852] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b949a3e2-b11b-44a8-bc36-9a1c42c76f31 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.670858] env[62525]: DEBUG oslo_vmware.api [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1548.670858] env[62525]: value = "task-1781458" [ 1548.670858] env[62525]: _type = "Task" [ 1548.670858] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.676270] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1548.676270] env[62525]: value = "task-1781459" [ 1548.676270] env[62525]: _type = "Task" [ 1548.676270] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.680128] env[62525]: DEBUG oslo_vmware.api [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781458, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.689157] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781459, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.741440] env[62525]: DEBUG nova.network.neutron [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Updating instance_info_cache with network_info: [{"id": "6b0336ac-59dc-4910-adf2-48b4b073fdb6", "address": "fa:16:3e:ca:a5:99", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0336ac-59", "ovs_interfaceid": "6b0336ac-59dc-4910-adf2-48b4b073fdb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.769084] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1548.769345] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8a4d136-1360-41b2-b3d0-3474c9bea03f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.777564] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1548.777564] env[62525]: value = "task-1781460" [ 1548.777564] env[62525]: _type = "Task" [ 1548.777564] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.786226] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781460, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.920719] env[62525]: DEBUG nova.compute.manager [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Received event network-vif-plugged-6b0336ac-59dc-4910-adf2-48b4b073fdb6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.920719] env[62525]: DEBUG oslo_concurrency.lockutils [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] Acquiring lock "81fbb354-21f2-43f0-8aa3-e80e10235326-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.920719] env[62525]: DEBUG oslo_concurrency.lockutils [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.920719] env[62525]: DEBUG oslo_concurrency.lockutils [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.920719] env[62525]: DEBUG nova.compute.manager [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] No waiting events found dispatching network-vif-plugged-6b0336ac-59dc-4910-adf2-48b4b073fdb6 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1548.921142] env[62525]: WARNING nova.compute.manager [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Received unexpected event network-vif-plugged-6b0336ac-59dc-4910-adf2-48b4b073fdb6 for instance with vm_state building and task_state spawning. [ 1548.921142] env[62525]: DEBUG nova.compute.manager [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Received event network-changed-6b0336ac-59dc-4910-adf2-48b4b073fdb6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.921232] env[62525]: DEBUG nova.compute.manager [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Refreshing instance network info cache due to event network-changed-6b0336ac-59dc-4910-adf2-48b4b073fdb6. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1548.921368] env[62525]: DEBUG oslo_concurrency.lockutils [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] Acquiring lock "refresh_cache-81fbb354-21f2-43f0-8aa3-e80e10235326" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.985016] env[62525]: DEBUG nova.compute.utils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1548.989887] env[62525]: DEBUG nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1548.989887] env[62525]: DEBUG nova.network.neutron [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1549.052197] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.078109] env[62525]: DEBUG nova.policy [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eee5a510a514320b8f5eb0a6bf66121', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba1be24793ee4d83babc07ff8ad5abad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1549.144232] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5292296a-1768-43ef-abc2-199ff9249d48, 'name': SearchDatastore_Task, 'duration_secs': 0.01083} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.145301] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e093f2a6-4736-49ba-a64b-fc38afad26f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.151907] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1549.151907] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523b0507-a294-f3ea-3915-9fe51a7dcad5" [ 1549.151907] env[62525]: _type = "Task" [ 1549.151907] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.160948] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b0507-a294-f3ea-3915-9fe51a7dcad5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.183579] env[62525]: DEBUG oslo_vmware.api [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781458, 'name': ResetVM_Task, 'duration_secs': 0.102328} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.187620] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Did hard reboot of VM {{(pid=62525) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1549.188103] env[62525]: DEBUG nova.compute.manager [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1549.189215] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4686c6a5-f607-479c-a4e3-bf7990bd7b3a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.199418] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781459, 'name': Rename_Task, 'duration_secs': 0.163681} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.201678] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1549.205018] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0052dc5-db0c-439f-8085-04494618fabf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.214768] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1549.214768] env[62525]: value = "task-1781461" [ 1549.214768] env[62525]: _type = "Task" [ 1549.214768] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.224438] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781461, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.248056] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-81fbb354-21f2-43f0-8aa3-e80e10235326" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.248056] env[62525]: DEBUG nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Instance network_info: |[{"id": "6b0336ac-59dc-4910-adf2-48b4b073fdb6", "address": "fa:16:3e:ca:a5:99", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0336ac-59", "ovs_interfaceid": "6b0336ac-59dc-4910-adf2-48b4b073fdb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 
1549.248056] env[62525]: DEBUG oslo_concurrency.lockutils [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] Acquired lock "refresh_cache-81fbb354-21f2-43f0-8aa3-e80e10235326" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.248056] env[62525]: DEBUG nova.network.neutron [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Refreshing network info cache for port 6b0336ac-59dc-4910-adf2-48b4b073fdb6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.248056] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:a5:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b0336ac-59dc-4910-adf2-48b4b073fdb6', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1549.256287] env[62525]: DEBUG oslo.service.loopingcall [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1549.257427] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1549.257828] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3b700c5-1987-42a6-a4b5-2947ce04fa7e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.283154] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1549.283154] env[62525]: value = "task-1781462" [ 1549.283154] env[62525]: _type = "Task" [ 1549.283154] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.291434] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781460, 'name': PowerOffVM_Task, 'duration_secs': 0.16006} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.291434] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1549.291434] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1549.292367] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f8237c-4a02-46b7-af13-b9f044652016 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.299034] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781462, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.304051] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1549.304331] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f6ebb49-b16c-4be7-af3e-e25b51681ff7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.333991] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1549.334344] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1549.334618] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Deleting the datastore file [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1549.334959] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07a6353f-ced0-43ea-b91f-0cc643e466c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.343316] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1549.343316] env[62525]: value = "task-1781464" [ 1549.343316] 
env[62525]: _type = "Task" [ 1549.343316] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.352756] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781464, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.391740] env[62525]: DEBUG nova.network.neutron [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Successfully created port: dfd804e8-763f-4fc8-8879-1c9e19cfcadc {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1549.490334] env[62525]: DEBUG nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1549.666187] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523b0507-a294-f3ea-3915-9fe51a7dcad5, 'name': SearchDatastore_Task, 'duration_secs': 0.024297} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.666187] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.666187] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6be49426-ddda-461e-908f-593c0904b129/6be49426-ddda-461e-908f-593c0904b129.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1549.666426] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2cef895-6f3d-4dee-a62e-9eaa6e4c70c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.680768] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1549.680768] env[62525]: value = "task-1781465" [ 1549.680768] env[62525]: _type = "Task" [ 1549.680768] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.693073] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.719581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6993891d-8b35-4e0b-b089-04092cc506b0 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.481s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.735659] env[62525]: DEBUG oslo_vmware.api [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781461, 'name': PowerOnVM_Task, 'duration_secs': 0.498438} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.735970] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1549.736658] env[62525]: INFO nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Took 7.89 seconds to spawn the instance on the hypervisor. [ 1549.737302] env[62525]: DEBUG nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1549.738032] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75027bb-70fe-47e7-acf8-a467f822ba26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.796651] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781462, 'name': CreateVM_Task, 'duration_secs': 0.383785} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.797656] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1549.798501] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.798714] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.799105] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1549.799691] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78d4518c-9ec3-4212-aa2b-50601b8568df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.807048] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1549.807048] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a606e9-cc88-6542-7205-ff2261e402f7" [ 1549.807048] env[62525]: _type = "Task" [ 1549.807048] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.820297] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a606e9-cc88-6542-7205-ff2261e402f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.859707] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781464, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146569} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.860808] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1549.860808] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1549.860808] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1549.936619] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d1a4f5-0c5d-4f97-84d5-5ea4935ac000 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.946908] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51540e61-617a-4b8f-9f7b-c0c5947f9cab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.981008] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5855d4e6-a65f-4c69-89e2-58bb7770a6eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.989846] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f5e59e-4722-4fda-834c-88bad5da6611 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.010664] env[62525]: DEBUG nova.compute.provider_tree [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.137182] env[62525]: DEBUG nova.network.neutron [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Updated VIF entry in instance network info cache for port 6b0336ac-59dc-4910-adf2-48b4b073fdb6. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1550.137182] env[62525]: DEBUG nova.network.neutron [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Updating instance_info_cache with network_info: [{"id": "6b0336ac-59dc-4910-adf2-48b4b073fdb6", "address": "fa:16:3e:ca:a5:99", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0336ac-59", "ovs_interfaceid": "6b0336ac-59dc-4910-adf2-48b4b073fdb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.194312] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781465, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.261104] env[62525]: INFO nova.compute.manager [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Took 44.79 seconds to build instance. [ 1550.334065] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a606e9-cc88-6542-7205-ff2261e402f7, 'name': SearchDatastore_Task, 'duration_secs': 0.023118} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.334065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.334065] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1550.334065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.334065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.334065] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1550.334065] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2779db9d-bdae-43c7-90a9-4715e3979029 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.339228] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1550.339228] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1550.339228] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cd9dba5-6dbb-4143-b1ba-554f3b6283b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.347023] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1550.347023] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523e7a57-4e8c-74a6-6305-91b1f5b599f3" [ 1550.347023] env[62525]: _type = "Task" [ 1550.347023] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.353256] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523e7a57-4e8c-74a6-6305-91b1f5b599f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.436024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.436024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.436024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.436024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.436024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.438974] env[62525]: INFO nova.compute.manager [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Terminating instance [ 1550.442521] env[62525]: DEBUG nova.compute.manager [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1550.442921] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1550.443958] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d035ef48-02d4-4680-bb72-2950001f0805 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.453193] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1550.453755] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f90ef27-4787-4701-aeda-a7343a95e19b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.463340] env[62525]: DEBUG oslo_vmware.api [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1550.463340] env[62525]: value = "task-1781466" [ 1550.463340] env[62525]: _type = "Task" [ 1550.463340] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.477357] env[62525]: DEBUG oslo_vmware.api [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781466, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.514243] env[62525]: DEBUG nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1550.516879] env[62525]: DEBUG nova.scheduler.client.report [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1550.545970] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1550.546251] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1550.546411] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1550.546596] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1550.546746] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1550.546923] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1550.547241] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1550.547417] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1550.547588] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1550.547757] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1550.548112] env[62525]: DEBUG nova.virt.hardware [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1550.548830] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c1076d-9428-43dd-a372-606c125cd105 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.558545] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8860fb75-7657-42d5-ad67-5887ebad038d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.639347] env[62525]: DEBUG oslo_concurrency.lockutils [req-9757daca-462b-40dd-8b9f-4c5276bbf1cc req-5d0a1b23-c701-4f9c-a317-9f34a19a076e service nova] Releasing lock "refresh_cache-81fbb354-21f2-43f0-8aa3-e80e10235326" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.691626] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781465, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678733} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.691932] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6be49426-ddda-461e-908f-593c0904b129/6be49426-ddda-461e-908f-593c0904b129.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1550.692176] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1550.692451] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6802084-179e-43bc-988d-5036b359968d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.700196] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1550.700196] env[62525]: value = "task-1781467" [ 1550.700196] env[62525]: _type = "Task" [ 1550.700196] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.713019] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781467, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.763215] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e640ee2d-0706-4bbc-a1c5-afd0110c6534 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.303s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.868557] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523e7a57-4e8c-74a6-6305-91b1f5b599f3, 'name': SearchDatastore_Task, 'duration_secs': 0.020641} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.872915] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f87a7225-43f6-4121-8c61-c6be038ca2fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.883022] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1550.883022] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cdcdc3-0e94-6bd4-30c1-c8fe9f6e746b" [ 1550.883022] env[62525]: _type = "Task" [ 1550.883022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.892577] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cdcdc3-0e94-6bd4-30c1-c8fe9f6e746b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.911784] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1550.912045] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1550.912210] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1550.912393] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1550.912538] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Image pref 0:0:0 {{(pid=62525) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1550.912703] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1550.912921] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1550.913100] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1550.913269] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1550.913431] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1550.913862] env[62525]: DEBUG nova.virt.hardware [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1550.914456] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fc04d1-6b0f-4e6e-9dc1-cb4b94f83f16 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.923491] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356c7048-3ee0-40b8-ad76-80077d1b7bac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.938922] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1550.944828] env[62525]: DEBUG oslo.service.loopingcall [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1550.945126] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1550.945341] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-460b9568-8e7a-4b85-8287-dbddb4c76a84 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.966408] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1550.966408] env[62525]: value = "task-1781468" [ 1550.966408] env[62525]: _type = "Task" [ 1550.966408] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.985186] env[62525]: DEBUG oslo_vmware.api [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781466, 'name': PowerOffVM_Task, 'duration_secs': 0.173143} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.985392] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781468, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.985653] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1550.985818] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1550.986079] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31445e2c-8878-4553-ae4b-43751a00ffb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.021817] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.022397] env[62525]: DEBUG nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1551.025546] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.069s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.027272] env[62525]: INFO nova.compute.claims [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1551.169047] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1551.169485] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1551.172019] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Deleting the datastore file [datastore1] bfb20735-1de9-4741-9d6f-5cd2ffedbca6 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1551.172019] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28ab38ff-54f7-40f3-84a4-9d3c427f341d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.182507] env[62525]: DEBUG oslo_vmware.api [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for the task: (returnval){ [ 1551.182507] env[62525]: value = "task-1781470" [ 1551.182507] env[62525]: _type = "Task" [ 1551.182507] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.191881] env[62525]: DEBUG oslo_vmware.api [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781470, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.196666] env[62525]: DEBUG nova.compute.manager [req-cb33ac2f-9f52-4704-a886-c94534a360fd req-a65abbee-937b-4766-b53b-0427d685b99e service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Received event network-vif-plugged-dfd804e8-763f-4fc8-8879-1c9e19cfcadc {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1551.196907] env[62525]: DEBUG oslo_concurrency.lockutils [req-cb33ac2f-9f52-4704-a886-c94534a360fd req-a65abbee-937b-4766-b53b-0427d685b99e service nova] Acquiring lock "024c7393-de18-4c76-a27e-757710824494-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.197137] env[62525]: DEBUG oslo_concurrency.lockutils [req-cb33ac2f-9f52-4704-a886-c94534a360fd req-a65abbee-937b-4766-b53b-0427d685b99e service nova] Lock "024c7393-de18-4c76-a27e-757710824494-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.197664] env[62525]: DEBUG oslo_concurrency.lockutils [req-cb33ac2f-9f52-4704-a886-c94534a360fd req-a65abbee-937b-4766-b53b-0427d685b99e service nova] Lock "024c7393-de18-4c76-a27e-757710824494-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.197664] env[62525]: DEBUG nova.compute.manager [req-cb33ac2f-9f52-4704-a886-c94534a360fd req-a65abbee-937b-4766-b53b-0427d685b99e service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] No waiting events found dispatching network-vif-plugged-dfd804e8-763f-4fc8-8879-1c9e19cfcadc {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1551.197664] env[62525]: WARNING nova.compute.manager [req-cb33ac2f-9f52-4704-a886-c94534a360fd req-a65abbee-937b-4766-b53b-0427d685b99e service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Received unexpected event network-vif-plugged-dfd804e8-763f-4fc8-8879-1c9e19cfcadc for instance with vm_state building and task_state spawning. [ 1551.214052] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781467, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070337} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.214440] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1551.216013] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec47845-7949-4b0e-b556-4fc29c915630 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.240199] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 6be49426-ddda-461e-908f-593c0904b129/6be49426-ddda-461e-908f-593c0904b129.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1551.240547] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d47dc05e-35d1-4438-8ef2-366e66c64002 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.258452] env[62525]: DEBUG nova.network.neutron [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Successfully updated port: dfd804e8-763f-4fc8-8879-1c9e19cfcadc {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1551.267386] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1551.267386] env[62525]: value = "task-1781471" [ 1551.267386] env[62525]: _type = "Task" [ 1551.267386] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.280633] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781471, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.336476] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "1badb7d9-692a-445e-ad47-ebd6e19f8197" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.336772] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.337022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "1badb7d9-692a-445e-ad47-ebd6e19f8197-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.337221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.337415] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.339532] env[62525]: INFO nova.compute.manager [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Terminating instance [ 1551.341582] env[62525]: DEBUG nova.compute.manager [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1551.341654] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1551.342503] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe2c285-fb09-4af4-b1a2-5e9303a1f494 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.352023] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1551.352241] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a74bf09-a830-4044-b893-16816db6b40b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.359755] env[62525]: DEBUG oslo_vmware.api [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1551.359755] env[62525]: value = "task-1781472" [ 1551.359755] env[62525]: _type = "Task" [ 1551.359755] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.369158] env[62525]: DEBUG oslo_vmware.api [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781472, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.394343] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cdcdc3-0e94-6bd4-30c1-c8fe9f6e746b, 'name': SearchDatastore_Task, 'duration_secs': 0.010383} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.394634] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.394963] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 81fbb354-21f2-43f0-8aa3-e80e10235326/81fbb354-21f2-43f0-8aa3-e80e10235326.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1551.395264] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ceeeb645-7c81-49c9-9ffa-decd95a5a45b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.404726] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1551.404726] env[62525]: value = "task-1781473" [ 1551.404726] env[62525]: _type = "Task" [ 1551.404726] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.414460] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781473, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.479984] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781468, 'name': CreateVM_Task, 'duration_secs': 0.35069} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.480336] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1551.480647] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.480830] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.481211] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1551.481494] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-358d23fe-0a63-4518-bbcc-964307757368 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.487897] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1551.487897] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c5ea45-8b46-1db0-5389-253231f98d9c" [ 1551.487897] env[62525]: _type = "Task" [ 1551.487897] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.497402] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c5ea45-8b46-1db0-5389-253231f98d9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.533196] env[62525]: DEBUG nova.compute.utils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1551.540023] env[62525]: DEBUG nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1551.540023] env[62525]: DEBUG nova.network.neutron [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1551.589863] env[62525]: DEBUG nova.policy [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c0deb1ab43142f29a15397a2e23d048', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '105f108590e14c649fff545b5b96f4fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1551.701234] env[62525]: DEBUG oslo_vmware.api [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Task: {'id': task-1781470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207786} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.702152] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1551.702386] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1551.702908] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1551.703171] env[62525]: INFO nova.compute.manager [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1551.703463] env[62525]: DEBUG oslo.service.loopingcall [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1551.703681] env[62525]: DEBUG nova.compute.manager [-] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1551.703920] env[62525]: DEBUG nova.network.neutron [-] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1551.762038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "refresh_cache-024c7393-de18-4c76-a27e-757710824494" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.762094] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "refresh_cache-024c7393-de18-4c76-a27e-757710824494" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.762222] env[62525]: DEBUG nova.network.neutron [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1551.789527] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781471, 'name': ReconfigVM_Task, 'duration_secs': 0.328318} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.790228] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 6be49426-ddda-461e-908f-593c0904b129/6be49426-ddda-461e-908f-593c0904b129.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.791314] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81828285-2e4a-4ddb-b926-9cd1ed768b09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.802587] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1551.802587] env[62525]: value = "task-1781474" [ 1551.802587] env[62525]: _type = "Task" [ 1551.802587] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.820998] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781474, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.876379] env[62525]: DEBUG oslo_vmware.api [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781472, 'name': PowerOffVM_Task, 'duration_secs': 0.196649} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.877205] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1551.877205] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1551.877205] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4afc2cb-b34b-4674-b663-a683a37e61ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.923025] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781473, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.998966] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c5ea45-8b46-1db0-5389-253231f98d9c, 'name': SearchDatastore_Task, 'duration_secs': 0.010993} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.999687] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.000196] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1552.000196] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.000447] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.000544] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1552.000813] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33cb635c-3d1b-4319-a7e3-ff001fec0960 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.009533] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1552.010503] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1552.010503] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86f655bb-bbe5-411a-8ce1-d3599f0ba93c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.018541] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1552.018541] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c4ffde-ed2c-97b3-70c1-c13af5eedcb5" [ 1552.018541] env[62525]: _type = "Task" [ 1552.018541] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.029927] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c4ffde-ed2c-97b3-70c1-c13af5eedcb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.032466] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1552.032466] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1552.032875] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Deleting the datastore file [datastore1] 1badb7d9-692a-445e-ad47-ebd6e19f8197 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1552.033796] env[62525]: DEBUG nova.network.neutron [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Successfully created port: f2cd5102-dd15-44ea-9596-d4251d37814c {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1552.035685] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c4159c5-e64b-42b8-a833-063e79f968a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.037851] env[62525]: DEBUG nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1552.050094] env[62525]: DEBUG oslo_vmware.api [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for the task: (returnval){ [ 1552.050094] env[62525]: value = "task-1781476" [ 1552.050094] env[62525]: _type = "Task" [ 1552.050094] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.060871] env[62525]: DEBUG oslo_vmware.api [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781476, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.145087] env[62525]: DEBUG nova.compute.manager [req-501f2b51-5921-4a77-8d15-1fd6483cc09d req-26b5a922-5e2a-4ffa-b6de-e23ce6c48513 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Received event network-vif-deleted-10079ce4-4ed3-4dc5-9fbc-8b200980365b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1552.145298] env[62525]: INFO nova.compute.manager [req-501f2b51-5921-4a77-8d15-1fd6483cc09d req-26b5a922-5e2a-4ffa-b6de-e23ce6c48513 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Neutron deleted interface 10079ce4-4ed3-4dc5-9fbc-8b200980365b; detaching it from the instance and deleting it from the info cache [ 1552.145474] env[62525]: DEBUG nova.network.neutron [req-501f2b51-5921-4a77-8d15-1fd6483cc09d req-26b5a922-5e2a-4ffa-b6de-e23ce6c48513 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.290954] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "16667060-2172-4c1b-a3c8-340bb38846cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.291399] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "16667060-2172-4c1b-a3c8-340bb38846cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.300317] env[62525]: DEBUG nova.network.neutron [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.316900] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781474, 'name': Rename_Task, 'duration_secs': 0.282288} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.317212] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1552.317468] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-631123aa-2222-4583-963c-ad40831db8dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.330275] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1552.330275] env[62525]: value = "task-1781477" [ 1552.330275] env[62525]: _type = "Task" [ 1552.330275] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.345907] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781477, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.419098] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781473, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582774} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.422031] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 81fbb354-21f2-43f0-8aa3-e80e10235326/81fbb354-21f2-43f0-8aa3-e80e10235326.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1552.422448] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1552.423610] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5152dfa9-d615-4f7d-9ba4-3c2eb89e1930 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.431260] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1552.431260] env[62525]: value = "task-1781478" [ 1552.431260] env[62525]: _type = "Task" [ 1552.431260] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.448140] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781478, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.482406] env[62525]: DEBUG nova.network.neutron [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Updating instance_info_cache with network_info: [{"id": "dfd804e8-763f-4fc8-8879-1c9e19cfcadc", "address": "fa:16:3e:fc:e7:eb", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfd804e8-76", "ovs_interfaceid": "dfd804e8-763f-4fc8-8879-1c9e19cfcadc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.532305] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c4ffde-ed2c-97b3-70c1-c13af5eedcb5, 'name': SearchDatastore_Task, 'duration_secs': 0.012105} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.535863] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ac4fe45-fd1d-4020-af87-96b9bc43fab0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.543104] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1552.543104] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52632fe3-930a-78cc-baeb-85787c33ae38" [ 1552.543104] env[62525]: _type = "Task" [ 1552.543104] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.564979] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52632fe3-930a-78cc-baeb-85787c33ae38, 'name': SearchDatastore_Task, 'duration_secs': 0.011136} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.566343] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.566751] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1552.567500] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b08f91-9844-4dc6-869a-3c2b8a86b47d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.573076] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-133ddd67-7454-4847-9f9a-0e868762f8cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.575145] env[62525]: DEBUG oslo_vmware.api [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Task: {'id': task-1781476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160598} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.575864] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1552.576104] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1552.576341] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1552.576593] env[62525]: INFO nova.compute.manager [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Took 1.23 seconds to destroy the instance on the hypervisor. 
[ 1552.576854] env[62525]: DEBUG oslo.service.loopingcall [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.577472] env[62525]: DEBUG nova.compute.manager [-] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1552.577609] env[62525]: DEBUG nova.network.neutron [-] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1552.583043] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ddf20d-f58a-4c7d-8d98-ccbdfb151e4a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.587739] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1552.587739] env[62525]: value = "task-1781479" [ 1552.587739] env[62525]: _type = "Task" [ 1552.587739] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.620373] env[62525]: DEBUG nova.network.neutron [-] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.624045] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afba2cf-96ff-4ff5-86bf-74d3aaa2758c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.629834] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781479, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.635864] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0761c76d-b832-4d7a-9ecb-1375ea7abb20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.656459] env[62525]: DEBUG nova.compute.provider_tree [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1552.657785] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2aa76666-ed61-4307-8f85-48571ea8fe14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.668861] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d9136d-2d6a-447b-b692-eee5e5b0fedd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.707382] env[62525]: DEBUG nova.compute.manager [req-501f2b51-5921-4a77-8d15-1fd6483cc09d req-26b5a922-5e2a-4ffa-b6de-e23ce6c48513 service nova] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Detach interface failed, port_id=10079ce4-4ed3-4dc5-9fbc-8b200980365b, reason: Instance bfb20735-1de9-4741-9d6f-5cd2ffedbca6 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1552.796877] env[62525]: DEBUG nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1552.845117] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781477, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.947674] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781478, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07098} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.948154] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1552.949099] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a76bf8e-8fd5-443a-a561-00d5ba54452d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.978397] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 81fbb354-21f2-43f0-8aa3-e80e10235326/81fbb354-21f2-43f0-8aa3-e80e10235326.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1552.979088] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8e68f71-e733-4721-b08e-fadbc04cd52a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.994557] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "refresh_cache-024c7393-de18-4c76-a27e-757710824494" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.995117] env[62525]: DEBUG nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Instance network_info: |[{"id": "dfd804e8-763f-4fc8-8879-1c9e19cfcadc", "address": "fa:16:3e:fc:e7:eb", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfd804e8-76", "ovs_interfaceid": "dfd804e8-763f-4fc8-8879-1c9e19cfcadc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1552.995722] env[62525]: DEBUG nova.virt.vmwareapi.vmops 
[None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:e7:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd72ef32-a57c-43b0-93df-e8a030987d44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfd804e8-763f-4fc8-8879-1c9e19cfcadc', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.005687] env[62525]: DEBUG oslo.service.loopingcall [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.006875] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 024c7393-de18-4c76-a27e-757710824494] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.007371] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62c1013f-27ac-4c38-91c6-38cc3ebb1b07 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.024881] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1553.024881] env[62525]: value = "task-1781480" [ 1553.024881] env[62525]: _type = "Task" [ 1553.024881] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.032188] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.032188] env[62525]: value = "task-1781481" [ 1553.032188] env[62525]: _type = "Task" [ 1553.032188] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.038876] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781480, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.046110] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781481, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.057779] env[62525]: DEBUG nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1553.097194] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='86c69e9b46b3ba3a6bf9088937f68959',container_format='bare',created_at=2024-12-12T00:13:16Z,direct_url=,disk_format='vmdk',id=f384cb86-ee9a-480a-89e0-d09d86894f5f,min_disk=1,min_ram=0,name='tempest-test-snap-730097634',owner='105f108590e14c649fff545b5b96f4fd',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-12T00:13:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1553.097581] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1553.097581] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1553.097805] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1553.097905] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1553.098118] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1553.098346] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1553.098557] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1553.098690] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Got 1 
possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1553.098906] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1553.099041] env[62525]: DEBUG nova.virt.hardware [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1553.100035] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5276b5a-9753-4e81-8151-c4d048dea297 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.106366] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781479, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.114167] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3877ae2a-d218-45ce-843d-c84ca17ffc7c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.131505] env[62525]: INFO nova.compute.manager [-] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Took 1.43 seconds to deallocate network for instance. [ 1553.161488] env[62525]: DEBUG nova.scheduler.client.report [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1553.223906] env[62525]: DEBUG nova.compute.manager [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Received event network-changed-dfd804e8-763f-4fc8-8879-1c9e19cfcadc {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1553.224122] env[62525]: DEBUG nova.compute.manager [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Refreshing instance network info cache due to event network-changed-dfd804e8-763f-4fc8-8879-1c9e19cfcadc. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1553.226633] env[62525]: DEBUG oslo_concurrency.lockutils [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] Acquiring lock "refresh_cache-024c7393-de18-4c76-a27e-757710824494" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.226853] env[62525]: DEBUG oslo_concurrency.lockutils [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] Acquired lock "refresh_cache-024c7393-de18-4c76-a27e-757710824494" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.227098] env[62525]: DEBUG nova.network.neutron [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Refreshing network info cache for port dfd804e8-763f-4fc8-8879-1c9e19cfcadc {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.327296] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.341011] env[62525]: DEBUG oslo_vmware.api [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781477, 'name': PowerOnVM_Task, 'duration_secs': 0.558069} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.341372] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1553.341609] env[62525]: INFO nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Took 9.01 seconds to spawn the instance on the hypervisor. 
[ 1553.341813] env[62525]: DEBUG nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1553.342665] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5280ecc9-afb2-43f8-b179-094c32c8fe60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.352995] env[62525]: DEBUG nova.network.neutron [-] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.543083] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781480, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.548829] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781481, 'name': CreateVM_Task, 'duration_secs': 0.497887} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.548995] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 024c7393-de18-4c76-a27e-757710824494] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1553.550832] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.551009] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.551328] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.551609] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e506525-cddf-433c-b920-cd2db1cbc87a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.556058] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1553.556058] env[62525]: value = 
"session[52912505-83d3-c6c8-239b-e663f6298abd]52adc3d5-8a37-d7c5-fb07-042e6c82c731" [ 1553.556058] env[62525]: _type = "Task" [ 1553.556058] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.563851] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52adc3d5-8a37-d7c5-fb07-042e6c82c731, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.599623] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781479, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61024} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.599902] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1553.600121] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1553.600372] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92da5987-0fb5-45f2-8c73-81de028729ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.611186] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1553.611186] env[62525]: value = "task-1781482" [ 1553.611186] env[62525]: _type = "Task" [ 1553.611186] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.624817] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781482, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.643138] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.666697] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.668298] env[62525]: DEBUG nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1553.670257] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.445s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.670559] env[62525]: DEBUG nova.objects.instance [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lazy-loading 'resources' on Instance uuid 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1553.858682] env[62525]: INFO nova.compute.manager [-] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Took 1.28 seconds to deallocate network for instance. [ 1553.862028] env[62525]: INFO nova.compute.manager [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Took 41.76 seconds to build instance. [ 1554.044718] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781480, 'name': ReconfigVM_Task, 'duration_secs': 0.664959} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.047763] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 81fbb354-21f2-43f0-8aa3-e80e10235326/81fbb354-21f2-43f0-8aa3-e80e10235326.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1554.047763] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b14f1f92-0385-476f-a87b-6bf5092dbfd3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.055481] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1554.055481] env[62525]: value = "task-1781483" [ 1554.055481] env[62525]: _type = "Task" [ 1554.055481] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.071682] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781483, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.075365] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52adc3d5-8a37-d7c5-fb07-042e6c82c731, 'name': SearchDatastore_Task, 'duration_secs': 0.043957} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.075842] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.076250] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1554.079018] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.079018] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.079018] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.079018] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae571b65-4122-44a8-82a3-507935ec6e42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.090568] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.091019] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1554.092162] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec7811b3-5386-4ce4-ac5a-0f16272bfb15 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.102487] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1554.102487] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520cc662-66b8-57d3-638a-33929e9c20c4" [ 1554.102487] env[62525]: _type = "Task" [ 1554.102487] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.113696] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520cc662-66b8-57d3-638a-33929e9c20c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.124528] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.231011} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.124980] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1554.127247] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b834574d-a3bf-4f84-94fc-73d892d8efdc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.149798] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1554.153210] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4e80ba5-c6c6-4782-9156-8be6bc9e252a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.174162] env[62525]: DEBUG nova.compute.utils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1554.180419] env[62525]: DEBUG nova.compute.manager 
[None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1554.181055] env[62525]: DEBUG nova.network.neutron [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1554.183494] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1554.183494] env[62525]: value = "task-1781484" [ 1554.183494] env[62525]: _type = "Task" [ 1554.183494] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.195842] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781484, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.228796] env[62525]: DEBUG nova.network.neutron [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Updated VIF entry in instance network info cache for port dfd804e8-763f-4fc8-8879-1c9e19cfcadc. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.228796] env[62525]: DEBUG nova.network.neutron [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Updating instance_info_cache with network_info: [{"id": "dfd804e8-763f-4fc8-8879-1c9e19cfcadc", "address": "fa:16:3e:fc:e7:eb", "network": {"id": "448d5fe0-7468-4676-88e3-27fbde04f612", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1765024461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1be24793ee4d83babc07ff8ad5abad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfd804e8-76", "ovs_interfaceid": "dfd804e8-763f-4fc8-8879-1c9e19cfcadc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.243122] env[62525]: DEBUG nova.policy [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6f6e065dce947b2a31313b33a08132c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3900af0b29fa40beb95a4260054c8e5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1554.305507] env[62525]: DEBUG nova.compute.manager [req-dd651a23-2ec9-48d7-92ff-2ee477357717 req-195d452a-d1b9-4b02-83b9-cbe3bb1af6c6 service nova] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Received event network-vif-deleted-8d49ec64-ed1a-4ad4-a592-32bd571e0800 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1554.337249] env[62525]: DEBUG nova.network.neutron [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Successfully updated port: f2cd5102-dd15-44ea-9596-d4251d37814c {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1554.364885] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8019f496-37db-4c14-916c-714e2181cc7c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "6be49426-ddda-461e-908f-593c0904b129" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 43.292s {{(pid=62525) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.367190] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.544105] env[62525]: DEBUG nova.network.neutron [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Successfully created port: 6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1554.568998] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781483, 'name': Rename_Task, 'duration_secs': 0.151198} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.570213] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1554.570959] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fdcbe9-9086-476b-b19b-e2a50e3c8e61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.573720] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3ced7bc-b201-48d4-9d17-59036b067caf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.584133] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3153db8e-699f-4e2c-82c8-de4e63c54826 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.588659] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1554.588659] env[62525]: value = "task-1781485" [ 1554.588659] env[62525]: _type = "Task" [ 1554.588659] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.626151] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4772fc10-c10a-4a70-91ed-50811eae1c15 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.632347] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781485, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.646050] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319f0424-342e-411c-9fcb-4b4944b3c2e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.650166] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520cc662-66b8-57d3-638a-33929e9c20c4, 'name': SearchDatastore_Task, 'duration_secs': 0.016638} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.651310] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ca7b2ec-6bc9-4768-92ff-f5db7e70516b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.664646] env[62525]: DEBUG nova.compute.provider_tree [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.667912] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1554.667912] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5203e8c9-277b-02af-dfdc-06ff90d6eaea" [ 1554.667912] env[62525]: _type = "Task" [ 1554.667912] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.677440] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5203e8c9-277b-02af-dfdc-06ff90d6eaea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.685544] env[62525]: DEBUG nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1554.698332] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781484, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.730484] env[62525]: DEBUG oslo_concurrency.lockutils [req-0f7cafc8-4489-4be1-bb4f-114871d4fe23 req-6aada3bd-38fa-41ff-9a63-ce0e718914da service nova] Releasing lock "refresh_cache-024c7393-de18-4c76-a27e-757710824494" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.838573] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "refresh_cache-808491cc-b195-4e81-afa5-86bd6ed8cb25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.838734] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "refresh_cache-808491cc-b195-4e81-afa5-86bd6ed8cb25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.839052] env[62525]: DEBUG nova.network.neutron [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1555.100703] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781485, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.166378] env[62525]: DEBUG nova.scheduler.client.report [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1555.181072] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5203e8c9-277b-02af-dfdc-06ff90d6eaea, 'name': SearchDatastore_Task, 'duration_secs': 0.011217} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.181359] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.181648] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 024c7393-de18-4c76-a27e-757710824494/024c7393-de18-4c76-a27e-757710824494.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1555.181915] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18944537-a0e5-426c-843e-bc3ea828d6b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.189937] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1555.189937] env[62525]: value = "task-1781486" [ 1555.189937] env[62525]: _type = "Task" [ 1555.189937] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.208540] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781484, 'name': ReconfigVM_Task, 'duration_secs': 0.69108} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.211405] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Reconfigured VM instance instance-00000040 to attach disk [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca/a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.212102] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781486, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.212322] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7dcd3f89-9538-49a2-b83d-de0446eae389 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.219759] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1555.219759] env[62525]: value = "task-1781487" [ 1555.219759] env[62525]: _type = "Task" [ 1555.219759] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.228559] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781487, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.255107] env[62525]: DEBUG nova.compute.manager [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Received event network-vif-plugged-f2cd5102-dd15-44ea-9596-d4251d37814c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1555.255480] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Acquiring lock "808491cc-b195-4e81-afa5-86bd6ed8cb25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.255860] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.256144] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.256408] env[62525]: DEBUG nova.compute.manager [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] No waiting events found dispatching network-vif-plugged-f2cd5102-dd15-44ea-9596-d4251d37814c {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1555.256667] env[62525]: WARNING nova.compute.manager [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Received unexpected event network-vif-plugged-f2cd5102-dd15-44ea-9596-d4251d37814c for instance with vm_state building and task_state spawning.
[ 1555.256948] env[62525]: DEBUG nova.compute.manager [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Received event network-changed-f2cd5102-dd15-44ea-9596-d4251d37814c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1555.257262] env[62525]: DEBUG nova.compute.manager [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Refreshing instance network info cache due to event network-changed-f2cd5102-dd15-44ea-9596-d4251d37814c. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1555.257567] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Acquiring lock "refresh_cache-808491cc-b195-4e81-afa5-86bd6ed8cb25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.374129] env[62525]: DEBUG nova.network.neutron [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1555.606690] env[62525]: DEBUG oslo_vmware.api [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781485, 'name': PowerOnVM_Task, 'duration_secs': 0.605598} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.606986] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1555.607212] env[62525]: INFO nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 1555.607436] env[62525]: DEBUG nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1555.608219] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34be7af1-b960-4132-bec2-47a244a0c28a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.623424] env[62525]: DEBUG nova.network.neutron [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Updating instance_info_cache with network_info: [{"id": "f2cd5102-dd15-44ea-9596-d4251d37814c", "address": "fa:16:3e:be:b6:e3", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2cd5102-dd", "ovs_interfaceid": "f2cd5102-dd15-44ea-9596-d4251d37814c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.676422] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.678888] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.301s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.679229] env[62525]: DEBUG nova.objects.instance [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lazy-loading 'resources' on Instance uuid 06716b84-3761-40b0-b76a-0c6ebf0d6aa7 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1555.702198] env[62525]: DEBUG nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1555.710030] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781486, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.710633] env[62525]: INFO nova.scheduler.client.report [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted allocations for instance 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1 [ 1555.730094] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781487, 'name': Rename_Task, 'duration_secs': 0.159749} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.730403] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1555.731023] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbe2a96b-f6d9-4d01-8f07-a24ed1aee5a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.740443] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1555.740710] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1555.740873] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1555.741067] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1555.741223] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1555.741372] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1555.741606] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1555.741785] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1555.741958] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1555.742143] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1555.742319] env[62525]: DEBUG nova.virt.hardware [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1555.743273] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0e78c8-8dac-429c-acac-9a6490f4dc35 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.747277] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1555.747277] env[62525]: value = "task-1781488" [ 1555.747277] env[62525]: _type = "Task" [ 1555.747277] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.754467] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049a7ae4-915e-4875-a759-67bad630ec12 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.761372] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.133653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "refresh_cache-808491cc-b195-4e81-afa5-86bd6ed8cb25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.133653] env[62525]: DEBUG nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Instance network_info: |[{"id": "f2cd5102-dd15-44ea-9596-d4251d37814c", "address": "fa:16:3e:be:b6:e3", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2cd5102-dd", "ovs_interfaceid": "f2cd5102-dd15-44ea-9596-d4251d37814c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1556.133653] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Acquired lock "refresh_cache-808491cc-b195-4e81-afa5-86bd6ed8cb25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.133653] env[62525]: DEBUG nova.network.neutron [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Refreshing network info cache for port f2cd5102-dd15-44ea-9596-d4251d37814c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1556.133653] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Instance VIF 
info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:b6:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2cd5102-dd15-44ea-9596-d4251d37814c', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1556.142950] env[62525]: DEBUG oslo.service.loopingcall [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.144112] env[62525]: DEBUG nova.network.neutron [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Successfully updated port: 6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1556.147835] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1556.148039] env[62525]: INFO nova.compute.manager [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Took 41.14 seconds to build instance. [ 1556.149179] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86f3ed8f-3b24-43e0-918a-9bdfe4343865 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.167044] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53570ee8-5a45-4ec2-9103-cd2e1eb6a642 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.166s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.174219] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1556.174219] env[62525]: value = "task-1781489" [ 1556.174219] env[62525]: _type = "Task" [ 1556.174219] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.187203] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781489, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.207197] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781486, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550738} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.207771] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 024c7393-de18-4c76-a27e-757710824494/024c7393-de18-4c76-a27e-757710824494.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1556.208122] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1556.208717] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff8b1d5b-ce25-443d-aa13-f06b433ae82b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.220295] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6d853de3-70b7-4ef4-8b32-daa753ac98f5 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.081s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.223024] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1556.223024] env[62525]: value = "task-1781490" [ 1556.223024] env[62525]: _type = "Task" [ 1556.223024] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.231596] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781490, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.259553] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781488, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.339698] env[62525]: DEBUG nova.compute.manager [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Received event network-vif-plugged-6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1556.339982] env[62525]: DEBUG oslo_concurrency.lockutils [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] Acquiring lock "2f713b35-9d07-4d25-a333-506fd2469bd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.340210] env[62525]: DEBUG oslo_concurrency.lockutils [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.340370] env[62525]: DEBUG oslo_concurrency.lockutils [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.340551] env[62525]: DEBUG nova.compute.manager [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] No waiting events found dispatching network-vif-plugged-6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1556.340697] env[62525]: WARNING nova.compute.manager [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Received unexpected event network-vif-plugged-6d3644a6-dbaa-4a30-930a-53beadf8704a for instance with vm_state building and task_state spawning. [ 1556.340860] env[62525]: DEBUG nova.compute.manager [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Received event network-changed-6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1556.341020] env[62525]: DEBUG nova.compute.manager [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Refreshing instance network info cache due to event network-changed-6d3644a6-dbaa-4a30-930a-53beadf8704a. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1556.341209] env[62525]: DEBUG oslo_concurrency.lockutils [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] Acquiring lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.341338] env[62525]: DEBUG oslo_concurrency.lockutils [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] Acquired lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.341756] env[62525]: DEBUG nova.network.neutron [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Refreshing network info cache for port 6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1556.537217] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1675738c-14c9-4dd4-9b4f-1bdef8c436b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.545714] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2483870-3dfb-455b-afb3-bf22cbccdf00 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.577308] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8cb406c-1222-4ebc-8e78-ad0c8170cee4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.585241] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd547e3a-fd83-4ba8-b432-e2fda4856726 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.600250] env[62525]: DEBUG nova.compute.provider_tree [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1556.650347] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.687030] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781489, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.693754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "f93669f2-c59d-4f3f-85a2-a60d714326ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.695122] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.695122] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "f93669f2-c59d-4f3f-85a2-a60d714326ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.695122] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.695122] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.696576] env[62525]: INFO nova.compute.manager [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Terminating instance [ 1556.698504] env[62525]: DEBUG nova.compute.manager [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1556.698696] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1556.699590] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bb45e7-a02e-4568-8ae4-a59d308c7ba3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.708189] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1556.708514] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-247fa48c-90f4-48e5-a0c0-75bf461735f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.716995] env[62525]: DEBUG oslo_vmware.api [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1556.716995] env[62525]: value = "task-1781491" [ 1556.716995] env[62525]: _type = "Task" [ 1556.716995] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.733722] env[62525]: DEBUG oslo_vmware.api [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781491, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.736323] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781490, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114316} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.736586] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1556.737460] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666b52ab-962e-42c2-ab6b-39d229e01434 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.763403] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 024c7393-de18-4c76-a27e-757710824494/024c7393-de18-4c76-a27e-757710824494.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1556.769151] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-662caf50-0a58-46f8-942c-b318aa018a83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.790122] env[62525]: DEBUG oslo_vmware.api [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781488, 'name': PowerOnVM_Task, 'duration_secs': 0.637946} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.791439] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1556.791666] env[62525]: DEBUG nova.compute.manager [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1556.791983] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1556.791983] env[62525]: value = "task-1781492" [ 1556.791983] env[62525]: _type = "Task" [ 1556.791983] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.792696] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1b2b1e-f773-4c19-b574-23cdddf7ee4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.803411] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781492, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.878751] env[62525]: DEBUG nova.network.neutron [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1556.903941] env[62525]: DEBUG nova.network.neutron [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Updated VIF entry in instance network info cache for port f2cd5102-dd15-44ea-9596-d4251d37814c. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1556.904356] env[62525]: DEBUG nova.network.neutron [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Updating instance_info_cache with network_info: [{"id": "f2cd5102-dd15-44ea-9596-d4251d37814c", "address": "fa:16:3e:be:b6:e3", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2cd5102-dd", "ovs_interfaceid": "f2cd5102-dd15-44ea-9596-d4251d37814c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.956288] env[62525]: DEBUG nova.network.neutron [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.123488] env[62525]: ERROR nova.scheduler.client.report [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] 
[req-0954e070-8f52-484a-9fd7-19b75f0a096a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0954e070-8f52-484a-9fd7-19b75f0a096a"}]} [ 1557.140844] env[62525]: DEBUG nova.scheduler.client.report [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1557.156619] env[62525]: DEBUG nova.scheduler.client.report [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1557.156619] env[62525]: DEBUG nova.compute.provider_tree [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1557.168209] env[62525]: DEBUG nova.scheduler.client.report [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1557.171053] env[62525]: DEBUG oslo_concurrency.lockutils [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "81fbb354-21f2-43f0-8aa3-e80e10235326" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.171342] 
env[62525]: DEBUG oslo_concurrency.lockutils [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.189572] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781489, 'name': CreateVM_Task, 'duration_secs': 0.800536} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.189805] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1557.190635] env[62525]: DEBUG nova.scheduler.client.report [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1557.193116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.193285] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.193665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1557.194530] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe2a94f7-1ac1-4898-85b7-9ba61b95b101 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.199602] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1557.199602] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5228c1f7-8e7f-8d57-676e-498ba63c961d" [ 1557.199602] env[62525]: _type = "Task" [ 1557.199602] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.211452] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5228c1f7-8e7f-8d57-676e-498ba63c961d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.230884] env[62525]: DEBUG oslo_vmware.api [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781491, 'name': PowerOffVM_Task, 'duration_secs': 0.281125} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.231174] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1557.231344] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1557.231610] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-140a8f3a-24ac-462c-b7fc-c8e1ba30424a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.304979] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781492, 'name': ReconfigVM_Task, 'duration_secs': 0.307084} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.305123] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 024c7393-de18-4c76-a27e-757710824494/024c7393-de18-4c76-a27e-757710824494.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1557.305821] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d2d5a66-16b3-4b7b-a011-704e7c08dca2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.318745] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.323318] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1557.323318] env[62525]: value = "task-1781494" [ 1557.323318] env[62525]: _type = "Task" [ 1557.323318] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.323318] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1557.323542] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1557.323634] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleting the datastore file [datastore1] f93669f2-c59d-4f3f-85a2-a60d714326ac {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1557.326400] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-851e74d1-fbed-4672-a658-c5219c7ab835 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.339580] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781494, 'name': Rename_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.342295] env[62525]: DEBUG oslo_vmware.api [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1557.342295] env[62525]: value = "task-1781495" [ 1557.342295] env[62525]: _type = "Task" [ 1557.342295] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.357330] env[62525]: DEBUG oslo_vmware.api [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781495, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.407020] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Releasing lock "refresh_cache-808491cc-b195-4e81-afa5-86bd6ed8cb25" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.407353] env[62525]: DEBUG nova.compute.manager [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-changed-7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1557.407559] env[62525]: DEBUG nova.compute.manager [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing instance network info cache due to event network-changed-7a7fc668-0509-45b5-954b-ce58cc91d1e3. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1557.408280] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.408280] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.408280] env[62525]: DEBUG nova.network.neutron [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing network info cache for port 7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1557.458852] env[62525]: DEBUG oslo_concurrency.lockutils [req-58f9b738-29cd-4e7b-ba5f-917dc59cf790 req-831962f4-97cf-4833-b2c4-de0db2ca8d73 service nova] Releasing lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.459186] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.459521] env[62525]: DEBUG nova.network.neutron [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1557.541385] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e1139b-89fa-4718-ad76-cf47350c519c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.549614] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb46b688-bda8-4570-af20-ef7d3b30917f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.582578] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78bd7b89-2d68-4e90-b093-1d55971972ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.591246] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f805a6a5-422f-4ad5-9fcb-f71852cb78a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.606025] env[62525]: DEBUG nova.compute.provider_tree [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed in ProviderTree for provider: 
bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1557.675079] env[62525]: DEBUG nova.compute.utils [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1557.711023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.711263] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Processing image f384cb86-ee9a-480a-89e0-d09d86894f5f {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1557.711553] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f/f384cb86-ee9a-480a-89e0-d09d86894f5f.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.711708] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f/f384cb86-ee9a-480a-89e0-d09d86894f5f.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.711893] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1557.712163] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b0b5d9e-1534-4599-aee9-e3806be1a62d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.723597] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1557.723794] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1557.724558] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2209ba29-7fdc-422d-a075-50f4d40e7dea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.730490] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1557.730490] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e3609b-437d-045e-4cd3-9caf14cdeaed" [ 1557.730490] env[62525]: _type = "Task" [ 1557.730490] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.738860] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e3609b-437d-045e-4cd3-9caf14cdeaed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.822923] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.823312] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.823570] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.823800] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.824030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.826195] env[62525]: INFO nova.compute.manager [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Terminating instance [ 1557.830866] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "refresh_cache-a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.831041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquired lock "refresh_cache-a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.831229] env[62525]: DEBUG nova.network.neutron [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1557.838590] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781494, 'name': Rename_Task, 'duration_secs': 0.158105} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.839439] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1557.839721] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a0804e4-d079-4016-ac2e-ab991a57d2c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.850696] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1557.850696] env[62525]: value = "task-1781496" [ 1557.850696] env[62525]: _type = "Task" [ 1557.850696] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.857326] env[62525]: DEBUG oslo_vmware.api [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354462} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.857908] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1557.858134] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1557.858329] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1557.858521] env[62525]: INFO nova.compute.manager [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1557.858763] env[62525]: DEBUG oslo.service.loopingcall [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1557.859245] env[62525]: DEBUG nova.compute.manager [-] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1557.859345] env[62525]: DEBUG nova.network.neutron [-] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1557.863754] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781496, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.004167] env[62525]: DEBUG nova.network.neutron [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1558.112143] env[62525]: DEBUG nova.scheduler.client.report [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1558.135777] env[62525]: DEBUG nova.compute.manager [req-7425410f-914e-43f4-b45f-58c9a9a55e44 req-c6ed3b96-b612-48a8-aaab-917d29eb8f4b service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Received event network-vif-deleted-a0cbf762-a06a-49a1-8925-b6235d3c0380 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1558.135991] env[62525]: INFO nova.compute.manager [req-7425410f-914e-43f4-b45f-58c9a9a55e44 req-c6ed3b96-b612-48a8-aaab-917d29eb8f4b service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Neutron deleted interface a0cbf762-a06a-49a1-8925-b6235d3c0380; detaching it from the instance and deleting it from the info cache [ 1558.136283] env[62525]: DEBUG nova.network.neutron [req-7425410f-914e-43f4-b45f-58c9a9a55e44 req-c6ed3b96-b612-48a8-aaab-917d29eb8f4b service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.179127] env[62525]: DEBUG oslo_concurrency.lockutils [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.244349] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Preparing fetch location {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1558.244639] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Fetch image to [datastore1] OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed/OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed.vmdk {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1558.244859] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Downloading stream optimized image f384cb86-ee9a-480a-89e0-d09d86894f5f to [datastore1] 
OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed/OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed.vmdk on the data store datastore1 as vApp {{(pid=62525) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1558.245054] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Downloading image file data f384cb86-ee9a-480a-89e0-d09d86894f5f to the ESX as VM named 'OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed' {{(pid=62525) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1558.252204] env[62525]: DEBUG nova.network.neutron [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Updating instance_info_cache with network_info: [{"id": "6d3644a6-dbaa-4a30-930a-53beadf8704a", "address": "fa:16:3e:fc:c0:64", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3644a6-db", "ovs_interfaceid": "6d3644a6-dbaa-4a30-930a-53beadf8704a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.292723] env[62525]: DEBUG nova.network.neutron [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updated VIF entry in instance network info cache for port 7a7fc668-0509-45b5-954b-ce58cc91d1e3. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1558.293171] env[62525]: DEBUG nova.network.neutron [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.330140] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1558.330140] env[62525]: value = "resgroup-9" [ 1558.330140] env[62525]: _type = "ResourcePool" [ 1558.330140] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1558.330389] env[62525]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d3841c46-8169-4e35-9a16-c2732277c870 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.358266] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease: (returnval){ [ 1558.358266] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521b88ec-95ea-f701-104e-fe87dd7b72fa" [ 1558.358266] env[62525]: _type = "HttpNfcLease" [ 1558.358266] env[62525]: } obtained for vApp import into resource pool (val){ [ 1558.358266] env[62525]: value = "resgroup-9" [ 1558.358266] env[62525]: _type = "ResourcePool" [ 1558.358266] env[62525]: }. 
{{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1558.358266] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the lease: (returnval){ [ 1558.358266] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521b88ec-95ea-f701-104e-fe87dd7b72fa" [ 1558.358266] env[62525]: _type = "HttpNfcLease" [ 1558.358266] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1558.365469] env[62525]: DEBUG nova.network.neutron [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1558.367193] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781496, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.370963] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1558.370963] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521b88ec-95ea-f701-104e-fe87dd7b72fa" [ 1558.370963] env[62525]: _type = "HttpNfcLease" [ 1558.370963] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1558.417773] env[62525]: DEBUG nova.network.neutron [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.608183] env[62525]: DEBUG nova.network.neutron [-] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.616623] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.938s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.619628] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.726s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.619918] env[62525]: DEBUG nova.objects.instance [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lazy-loading 
'resources' on Instance uuid 0a7ef997-bda5-452e-abe0-537146bf23f8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1558.639926] env[62525]: INFO nova.scheduler.client.report [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Deleted allocations for instance 06716b84-3761-40b0-b76a-0c6ebf0d6aa7 [ 1558.641253] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2d793e0-60f1-431d-b65f-4a9aa02b0b01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.654929] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28212504-fd27-4b87-8ca3-bda78a04cc1a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.697393] env[62525]: DEBUG nova.compute.manager [req-7425410f-914e-43f4-b45f-58c9a9a55e44 req-c6ed3b96-b612-48a8-aaab-917d29eb8f4b service nova] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Detach interface failed, port_id=a0cbf762-a06a-49a1-8925-b6235d3c0380, reason: Instance f93669f2-c59d-4f3f-85a2-a60d714326ac could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1558.754477] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.754780] env[62525]: DEBUG nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Instance network_info: |[{"id": "6d3644a6-dbaa-4a30-930a-53beadf8704a", "address": "fa:16:3e:fc:c0:64", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3644a6-db", "ovs_interfaceid": "6d3644a6-dbaa-4a30-930a-53beadf8704a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1558.755656] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 
2f713b35-9d07-4d25-a333-506fd2469bd5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:c0:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d3644a6-dbaa-4a30-930a-53beadf8704a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1558.765419] env[62525]: DEBUG oslo.service.loopingcall [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.766484] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1558.766654] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f72ccc26-95c9-49c8-94e8-c3840ae09207 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.789279] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1558.789279] env[62525]: value = "task-1781498" [ 1558.789279] env[62525]: _type = "Task" [ 1558.789279] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.798298] env[62525]: DEBUG oslo_concurrency.lockutils [req-67a14fd6-e94e-4d4a-af5d-3da92579170e req-00f19269-3f1b-4f14-8b56-0fc827601733 service nova] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.798915] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781498, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.866068] env[62525]: DEBUG oslo_vmware.api [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781496, 'name': PowerOnVM_Task, 'duration_secs': 0.721308} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.866873] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1558.867244] env[62525]: INFO nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 1558.867493] env[62525]: DEBUG nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1558.868391] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d54fda4-8d55-4d26-abe0-5bfbab391593 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.873082] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1558.873082] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521b88ec-95ea-f701-104e-fe87dd7b72fa" [ 1558.873082] env[62525]: _type = "HttpNfcLease" [ 1558.873082] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1558.921619] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Releasing lock "refresh_cache-a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.922249] env[62525]: DEBUG nova.compute.manager [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1558.922530] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1558.923754] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31249071-b0d1-44cc-9ea3-8e07464e6c67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.933771] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1558.934266] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb4dbc7c-d02a-4808-8620-872f0ab8bd02 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.942081] env[62525]: DEBUG oslo_vmware.api [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1558.942081] env[62525]: value = "task-1781499" [ 1558.942081] env[62525]: _type = "Task" [ 1558.942081] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.952531] env[62525]: DEBUG oslo_vmware.api [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781499, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.110631] env[62525]: INFO nova.compute.manager [-] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Took 1.25 seconds to deallocate network for instance. [ 1559.152712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d192f49d-5b7a-4699-8b30-529a36603d9d tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "06716b84-3761-40b0-b76a-0c6ebf0d6aa7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.817s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.249801] env[62525]: DEBUG oslo_concurrency.lockutils [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "81fbb354-21f2-43f0-8aa3-e80e10235326" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.250135] env[62525]: DEBUG oslo_concurrency.lockutils [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.250399] env[62525]: INFO nova.compute.manager [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Attaching volume 42c6b2b4-d3a1-429c-bad7-7c6c77797b64 to /dev/sdb [ 1559.288338] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59261f51-bfcb-4251-939c-7e73ba9b4980 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.304288] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b515fb9e-31ea-4b31-9b6f-1a4a2a5f772e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.306682] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781498, 'name': CreateVM_Task, 'duration_secs': 0.503501} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.308960] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1559.310124] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.310306] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.310667] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1559.310920] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02bff8fb-2066-49d6-8d80-535598db087b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.316286] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1559.316286] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b06599-033d-4a57-6413-d8919869e7ef" [ 1559.316286] env[62525]: _type = "Task" [ 1559.316286] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.325979] env[62525]: DEBUG nova.virt.block_device [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Updating existing volume attachment record: f966f797-1678-4f80-8b81-f9686ffe1805 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1559.344061] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b06599-033d-4a57-6413-d8919869e7ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.370286] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1559.370286] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521b88ec-95ea-f701-104e-fe87dd7b72fa" [ 1559.370286] env[62525]: _type = "HttpNfcLease" [ 1559.370286] env[62525]: } is ready. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1559.370579] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1559.370579] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521b88ec-95ea-f701-104e-fe87dd7b72fa" [ 1559.370579] env[62525]: _type = "HttpNfcLease" [ 1559.370579] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1559.371356] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902a9614-47dd-4424-ac5a-85003c72637b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.381070] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f41e0a-66e8-4238-7a20-d697ba3afcbb/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1559.381070] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f41e0a-66e8-4238-7a20-d697ba3afcbb/disk-0.vmdk. {{(pid=62525) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1559.450391] env[62525]: INFO nova.compute.manager [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Took 29.31 seconds to build instance. [ 1559.464144] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-61e2da04-9c83-4286-a7f9-f6511736f417 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.466204] env[62525]: DEBUG oslo_vmware.api [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781499, 'name': PowerOffVM_Task, 'duration_secs': 0.285158} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.470338] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1559.470602] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1559.474905] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-506c7f33-9a04-4dc1-890a-261820e341b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.513781] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1559.513781] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1559.513781] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Deleting the datastore file [datastore1] a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1559.514059] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3f2a2f1-4c77-4415-93a3-24361b289348 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.525018] env[62525]: DEBUG oslo_vmware.api [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for the task: (returnval){ [ 1559.525018] env[62525]: value = "task-1781502" [ 1559.525018] env[62525]: _type = "Task" [ 1559.525018] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.531524] env[62525]: DEBUG oslo_vmware.api [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781502, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.545181] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d0e6f9-5f98-4be8-a66d-dbd59661fe97 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.552815] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dad39a-57d7-4a6b-89e0-0d7a22286d08 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.586242] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07270a5c-c054-4cda-95bb-c476735593a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.596194] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4a0b0e-6b01-4a79-b6dd-240dce3d2ade {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.609495] env[62525]: DEBUG nova.compute.provider_tree [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.617516] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.837829] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b06599-033d-4a57-6413-d8919869e7ef, 'name': SearchDatastore_Task, 'duration_secs': 0.025468} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.840404] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.840712] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1559.840997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.841190] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.841391] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1559.841778] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a9bfd7b-dadc-450d-81d3-85709ccf7faf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.865899] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1559.866122] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1559.868337] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb36e783-1ce0-4292-acc8-1e72b2188b04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.876827] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1559.876827] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52596329-6acc-4490-5ca1-11d153fee7b1" [ 1559.876827] env[62525]: _type = "Task" [ 1559.876827] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.892026] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52596329-6acc-4490-5ca1-11d153fee7b1, 'name': SearchDatastore_Task, 'duration_secs': 0.012919} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.895587] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34252000-cc6c-4bde-9d9c-39e6c9e2b57c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.902206] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1559.902206] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527b8d5f-b5b8-68c3-a258-b3790670b466" [ 1559.902206] env[62525]: _type = "Task" [ 1559.902206] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.912952] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527b8d5f-b5b8-68c3-a258-b3790670b466, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.958029] env[62525]: DEBUG oslo_concurrency.lockutils [None req-45eb8fc6-1716-4986-8c28-dce60d3af104 tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "024c7393-de18-4c76-a27e-757710824494" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.833s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.036311] env[62525]: DEBUG oslo_vmware.api [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Task: {'id': task-1781502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143787} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.037907] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1560.038148] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1560.038312] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1560.038498] env[62525]: INFO nova.compute.manager [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1560.038776] env[62525]: DEBUG oslo.service.loopingcall [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1560.038961] env[62525]: DEBUG nova.compute.manager [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1560.039068] env[62525]: DEBUG nova.network.neutron [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1560.055921] env[62525]: DEBUG nova.network.neutron [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1560.112744] env[62525]: DEBUG nova.scheduler.client.report [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1560.278693] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.279176] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.279232] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.279407] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.279591] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.282027] env[62525]: INFO nova.compute.manager [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Terminating instance [ 1560.283924] env[62525]: DEBUG nova.compute.manager [None 
req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1560.284096] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1560.284968] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a53e4b3-d6d1-4438-8a3f-061eed944de7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.294808] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1560.295123] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5efc918-2c85-4d07-b4c6-bc00fdcd1773 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.303700] env[62525]: DEBUG oslo_vmware.api [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1560.303700] env[62525]: value = "task-1781505" [ 1560.303700] env[62525]: _type = "Task" [ 1560.303700] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.314928] env[62525]: DEBUG oslo_vmware.api [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781505, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.401078] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Completed reading data from the image iterator. {{(pid=62525) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1560.401368] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f41e0a-66e8-4238-7a20-d697ba3afcbb/disk-0.vmdk. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1560.402357] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338e3737-487c-445b-b31c-901fbffaf8c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.415627] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f41e0a-66e8-4238-7a20-d697ba3afcbb/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1560.415846] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f41e0a-66e8-4238-7a20-d697ba3afcbb/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1560.416045] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527b8d5f-b5b8-68c3-a258-b3790670b466, 'name': SearchDatastore_Task, 'duration_secs': 0.010278} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.416243] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d588ad1a-5cfd-41dc-b829-80c1656fbec6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.417720] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.417981] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1560.418275] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bccf870-994d-4c99-8b99-0e68c9eb41ad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.427264] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1560.427264] env[62525]: value = "task-1781506" [ 1560.427264] env[62525]: _type = "Task" [ 1560.427264] 
env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.436016] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.560441] env[62525]: DEBUG nova.network.neutron [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.619830] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.622067] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.570s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.622307] env[62525]: DEBUG nova.objects.instance [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lazy-loading 'resources' on Instance uuid 24d38b8e-c48b-4562-817e-7ae57658fb1b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.644069] env[62525]: INFO nova.scheduler.client.report [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Deleted allocations for instance 0a7ef997-bda5-452e-abe0-537146bf23f8 [ 1560.684244] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "024c7393-de18-4c76-a27e-757710824494" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.684581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "024c7393-de18-4c76-a27e-757710824494" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.685025] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "024c7393-de18-4c76-a27e-757710824494-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.685119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "024c7393-de18-4c76-a27e-757710824494-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.685346] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "024c7393-de18-4c76-a27e-757710824494-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.689471] env[62525]: INFO nova.compute.manager [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Terminating instance [ 1560.691361] env[62525]: DEBUG nova.compute.manager [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1560.691605] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1560.692589] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaaab45-2509-4a6b-be57-ebd77eaf49dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.703177] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1560.703499] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64079d9e-4c06-4efc-9304-044035cf579c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.717219] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1560.717219] env[62525]: value = "task-1781507" [ 1560.717219] env[62525]: _type = "Task" [ 1560.717219] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.726708] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781507, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.756352] env[62525]: DEBUG oslo_vmware.rw_handles [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f41e0a-66e8-4238-7a20-d697ba3afcbb/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1560.756721] env[62525]: INFO nova.virt.vmwareapi.images [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Downloaded image file data f384cb86-ee9a-480a-89e0-d09d86894f5f [ 1560.757905] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80501a77-2db4-4565-bd8e-608aa729512d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.775754] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6efd135-3c0a-4de1-a6bc-167d381bf23a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.818833] env[62525]: DEBUG oslo_vmware.api [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781505, 'name': PowerOffVM_Task, 'duration_secs': 0.205375} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.819473] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1560.819473] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1560.819864] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5570ef30-7d1d-4e28-b337-1c5198d33fca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.840638] env[62525]: INFO nova.virt.vmwareapi.images [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] The imported VM was unregistered [ 1560.843950] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Caching image {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1560.844259] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating directory with path [datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.844624] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e99dc05-4641-49f6-bca9-de7ab5e93c3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.884243] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created directory with path [datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.884473] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed/OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed.vmdk to [datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f/f384cb86-ee9a-480a-89e0-d09d86894f5f.vmdk. 
{{(pid=62525) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1560.884755] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8d84a709-e787-4034-a3ff-701fb2cd5aa1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.894136] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1560.894136] env[62525]: value = "task-1781510" [ 1560.894136] env[62525]: _type = "Task" [ 1560.894136] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.904766] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781510, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.941124] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781506, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.948808] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1560.949118] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1560.949248] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Deleting the datastore file [datastore1] 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.949541] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0edd4474-6d6c-4624-8567-5b7a7209b0c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.959530] env[62525]: DEBUG oslo_vmware.api [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for the task: (returnval){ [ 1560.959530] env[62525]: value = "task-1781511" [ 1560.959530] env[62525]: _type = "Task" [ 1560.959530] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.974697] env[62525]: DEBUG oslo_vmware.api [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781511, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.062606] env[62525]: INFO nova.compute.manager [-] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Took 1.02 seconds to deallocate network for instance. [ 1561.155091] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6b0593bb-9523-4250-8ad2-ef3167edf0cf tempest-InstanceActionsNegativeTestJSON-2001946827 tempest-InstanceActionsNegativeTestJSON-2001946827-project-member] Lock "0a7ef997-bda5-452e-abe0-537146bf23f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.423s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.228287] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781507, 'name': PowerOffVM_Task, 'duration_secs': 0.310389} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.230969] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1561.231191] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1561.231627] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d52fa18-54d5-4380-a578-cf6e91c47451 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.407971] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781510, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.415672] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1561.415916] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1561.416115] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleting the datastore file [datastore1] 024c7393-de18-4c76-a27e-757710824494 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1561.416381] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dfbc07a-fb88-4123-983d-957d279e344f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.423991] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for the task: (returnval){ [ 1561.423991] env[62525]: value = "task-1781513" [ 1561.423991] env[62525]: _type = "Task" [ 1561.423991] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.437468] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324875cf-6903-499e-bfdb-524e3f1e298f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.445573] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781513, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.445818] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781506, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643883} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.446474] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1561.446730] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1561.446981] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84c9cf8a-75cd-4f72-9319-c3a27d0f3e99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.451413] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c95950-6c5c-44d0-a7b8-b5f3829b8e65 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.456707] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1561.456707] env[62525]: value = "task-1781514" [ 1561.456707] env[62525]: _type = "Task" [ 1561.456707] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.489641] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a39196-ecda-4f7b-948f-7288283b5bb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.495519] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781514, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.501614] env[62525]: DEBUG oslo_vmware.api [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Task: {'id': task-1781511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.512727} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.503998] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1561.504219] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1561.504401] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1561.504577] env[62525]: INFO nova.compute.manager [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1561.504828] env[62525]: DEBUG oslo.service.loopingcall [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.505112] env[62525]: DEBUG nova.compute.manager [-] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1561.505211] env[62525]: DEBUG nova.network.neutron [-] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1561.507991] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e718fcd7-9c4d-4ead-b016-2de9c5e9ba64 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.526691] env[62525]: DEBUG nova.compute.provider_tree [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.571055] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.911576] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781510, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.942534] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781513, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.948529] env[62525]: DEBUG nova.compute.manager [req-80c78fa4-9822-45e9-b05d-099781a86793 req-079ffd39-0bb5-4a5b-bdda-369b7e2f00f1 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Received event network-vif-deleted-62c6a0e2-0091-4863-a677-cbdf737769b7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1561.948764] env[62525]: INFO nova.compute.manager [req-80c78fa4-9822-45e9-b05d-099781a86793 req-079ffd39-0bb5-4a5b-bdda-369b7e2f00f1 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Neutron deleted interface 62c6a0e2-0091-4863-a677-cbdf737769b7; detaching it from the instance and deleting it from the info cache [ 1561.948922] env[62525]: DEBUG nova.network.neutron [req-80c78fa4-9822-45e9-b05d-099781a86793 req-079ffd39-0bb5-4a5b-bdda-369b7e2f00f1 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.972356] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781514, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124183} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.972965] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1561.973856] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acd803e-7ccd-4c8a-964c-68858770b358 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.005140] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1562.005512] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5662c916-61fb-42d5-95d0-9d866e782664 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.034175] env[62525]: DEBUG nova.scheduler.client.report [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1562.041025] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1562.041025] env[62525]: value = "task-1781516" [ 1562.041025] env[62525]: _type = "Task" [ 1562.041025] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.050180] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781516, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.355167] env[62525]: DEBUG nova.network.neutron [-] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.409995] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781510, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.439433] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781513, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.452491] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4cd7b666-2342-4d45-9522-111b8aa0045a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.466383] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e2b422-a227-49fd-a72c-f25c0fac8085 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.509615] env[62525]: DEBUG nova.compute.manager [req-80c78fa4-9822-45e9-b05d-099781a86793 req-079ffd39-0bb5-4a5b-bdda-369b7e2f00f1 service nova] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Detach interface failed, port_id=62c6a0e2-0091-4863-a677-cbdf737769b7, reason: Instance 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1562.540106] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.543187] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.216s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.545130] env[62525]: INFO nova.compute.claims [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.560434] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.568641] env[62525]: INFO nova.scheduler.client.report [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Deleted allocations for instance 24d38b8e-c48b-4562-817e-7ae57658fb1b [ 1562.860238] env[62525]: INFO nova.compute.manager [-] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Took 1.35 seconds to deallocate network for instance. [ 1562.911357] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781510, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.945621] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781513, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.059035] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781516, 'name': ReconfigVM_Task, 'duration_secs': 1.011668} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.060046] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1563.060680] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3ece7e2-f66d-4a10-aa43-ff39cc2a6412 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.071143] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1563.071143] env[62525]: value = "task-1781517" [ 1563.071143] env[62525]: _type = "Task" [ 1563.071143] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.084678] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781517, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.085282] env[62525]: DEBUG oslo_concurrency.lockutils [None req-77e4924e-6589-431b-b1e1-c0f6163cb370 tempest-ServerRescueTestJSONUnderV235-1955954239 tempest-ServerRescueTestJSONUnderV235-1955954239-project-member] Lock "24d38b8e-c48b-4562-817e-7ae57658fb1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.535s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.366938] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.411014] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781510, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.441615] env[62525]: DEBUG oslo_vmware.api [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Task: {'id': task-1781513, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.567474} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.441615] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1563.441615] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1563.441809] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1563.441928] env[62525]: INFO nova.compute.manager [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] [instance: 024c7393-de18-4c76-a27e-757710824494] Took 2.75 seconds to destroy the instance on the hypervisor. [ 1563.442328] env[62525]: DEBUG oslo.service.loopingcall [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1563.442411] env[62525]: DEBUG nova.compute.manager [-] [instance: 024c7393-de18-4c76-a27e-757710824494] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1563.442488] env[62525]: DEBUG nova.network.neutron [-] [instance: 024c7393-de18-4c76-a27e-757710824494] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1563.587860] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781517, 'name': Rename_Task, 'duration_secs': 0.402323} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.588192] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1563.588388] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0721e3f-2e5e-45ed-afce-5854b1d7f275 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.598759] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1563.598759] env[62525]: value = "task-1781518" [ 1563.598759] env[62525]: _type = "Task" [ 1563.598759] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.614522] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781518, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.913659] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781510, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.847073} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.914432] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed/OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed.vmdk to [datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f/f384cb86-ee9a-480a-89e0-d09d86894f5f.vmdk. 
[ 1563.914808] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Cleaning up location [datastore1] OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1563.915239] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_5279e62e-1ed8-41ef-bd0e-0de4948cebed {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1563.915558] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2f4aad0-a2f0-4cbd-8121-6c4715ea6403 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.925047] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155d27fb-5ee1-4f9e-a98a-3a00b3b93694 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.929398] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1563.929398] env[62525]: value = "task-1781519" [ 1563.929398] env[62525]: _type = "Task" [ 1563.929398] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.938459] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e3b066-1393-47f7-8bdb-b4b2f10ee65d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.951020] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781519, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.988804] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dbe43c-9258-4a98-ada1-3d4121c99bc6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.998388] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1554a5-8e55-4216-b689-daf94d815d57 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.017180] env[62525]: DEBUG nova.compute.provider_tree [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.030618] env[62525]: DEBUG nova.compute.manager [req-8d6826b7-32b9-4a50-9831-e6eb7ebadc45 req-e976a035-3a6f-41e5-a52d-18134522b81e service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Received event network-vif-deleted-dfd804e8-763f-4fc8-8879-1c9e19cfcadc {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1564.030823] env[62525]: INFO nova.compute.manager [req-8d6826b7-32b9-4a50-9831-e6eb7ebadc45 req-e976a035-3a6f-41e5-a52d-18134522b81e service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Neutron deleted interface dfd804e8-763f-4fc8-8879-1c9e19cfcadc; detaching it from the instance and deleting it from the info cache [ 1564.030992] env[62525]: DEBUG nova.network.neutron [req-8d6826b7-32b9-4a50-9831-e6eb7ebadc45 req-e976a035-3a6f-41e5-a52d-18134522b81e service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.111027] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781518, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.376545] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1564.376835] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369753', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'name': 'volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '81fbb354-21f2-43f0-8aa3-e80e10235326', 'attached_at': '', 'detached_at': '', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'serial': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1564.377861] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b9d8be-5bcc-40a2-afe1-6e12d70d7a86 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.396804] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5e506c-c8dd-409a-871e-8b36857b0fc9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.426847] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64/volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.427237] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8ce836f-e60e-449d-be3a-7b2b7cc78203 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.450723] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781519, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043456} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.452247] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1564.452593] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f/f384cb86-ee9a-480a-89e0-d09d86894f5f.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.453034] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f/f384cb86-ee9a-480a-89e0-d09d86894f5f.vmdk to [datastore1] 808491cc-b195-4e81-afa5-86bd6ed8cb25/808491cc-b195-4e81-afa5-86bd6ed8cb25.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1564.453502] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1564.453502] env[62525]: value = "task-1781520" [ 1564.453502] env[62525]: _type = "Task" [ 1564.453502] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.453981] env[62525]: DEBUG nova.network.neutron [-] [instance: 024c7393-de18-4c76-a27e-757710824494] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.455199] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38d8125e-dfa6-4f31-95a3-2ecd1e54372f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.477467] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1564.477467] env[62525]: value = "task-1781521" [ 1564.477467] env[62525]: _type = "Task" [ 1564.477467] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.487773] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781521, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.521253] env[62525]: DEBUG nova.scheduler.client.report [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1564.534903] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef462279-c4e0-4b8e-b35a-669bfc02d8e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.547589] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786404db-f291-4df4-8105-a84a75f1b8c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.591653] env[62525]: DEBUG nova.compute.manager [req-8d6826b7-32b9-4a50-9831-e6eb7ebadc45 req-e976a035-3a6f-41e5-a52d-18134522b81e service nova] [instance: 024c7393-de18-4c76-a27e-757710824494] Detach interface failed, port_id=dfd804e8-763f-4fc8-8879-1c9e19cfcadc, reason: Instance 024c7393-de18-4c76-a27e-757710824494 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1564.608982] env[62525]: DEBUG oslo_vmware.api [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781518, 'name': PowerOnVM_Task, 'duration_secs': 0.571829} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.609379] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1564.609587] env[62525]: INFO nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Took 8.91 seconds to spawn the instance on the hypervisor. 
[ 1564.609798] env[62525]: DEBUG nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1564.610582] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bb8a86-e666-44c4-bfab-55bca09557bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.959184] env[62525]: INFO nova.compute.manager [-] [instance: 024c7393-de18-4c76-a27e-757710824494] Took 1.52 seconds to deallocate network for instance. [ 1564.971235] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781520, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.998256] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781521, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.028734] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.028734] env[62525]: DEBUG nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1565.032460] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.389s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.032815] env[62525]: DEBUG nova.objects.instance [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lazy-loading 'resources' on Instance uuid bfb20735-1de9-4741-9d6f-5cd2ffedbca6 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1565.132398] env[62525]: INFO nova.compute.manager [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Took 32.20 seconds to build instance. 
[ 1565.477017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.477017] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781520, 'name': ReconfigVM_Task, 'duration_secs': 0.714587} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.477017] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64/volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1565.480583] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8011cf9-3512-4982-9eb6-63908a1a9bd6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.504036] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781521, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.506294] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1565.506294] env[62525]: value = "task-1781522" [ 1565.506294] env[62525]: _type = "Task" [ 1565.506294] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.518548] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781522, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.536632] env[62525]: DEBUG nova.compute.utils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.540230] env[62525]: DEBUG nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1565.540428] env[62525]: DEBUG nova.network.neutron [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1565.594396] env[62525]: DEBUG nova.policy [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8109b1ed3a6a4fd88f9ec2c81bae5dee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0847efc6cc8b4a9894c34abfee0384ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1565.636059] env[62525]: DEBUG oslo_concurrency.lockutils [None req-399d8e06-f346-4a68-b22e-5efd018719a4 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.710s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.915830] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd7cd92-9d35-42ee-b8fe-23bbf4f41871 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.926553] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4324c557-beec-4331-8baf-9657f125462d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.968554] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80027e9-67d6-4254-9026-013c71f59473 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.988566] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c191619-78d7-4fa1-80ca-c282bd92748e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.000843] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781521, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.009081] env[62525]: DEBUG nova.compute.provider_tree [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.019341] env[62525]: DEBUG nova.network.neutron [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Successfully created port: ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.026060] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781522, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.043965] env[62525]: DEBUG nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1566.495886] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781521, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.516473] env[62525]: DEBUG nova.scheduler.client.report [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1566.525242] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781522, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.540511] env[62525]: DEBUG nova.compute.manager [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Received event network-changed-6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1566.540792] env[62525]: DEBUG nova.compute.manager [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Refreshing instance network info cache due to event network-changed-6d3644a6-dbaa-4a30-930a-53beadf8704a. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1566.541024] env[62525]: DEBUG oslo_concurrency.lockutils [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] Acquiring lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.541582] env[62525]: DEBUG oslo_concurrency.lockutils [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] Acquired lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.541894] env[62525]: DEBUG nova.network.neutron [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Refreshing network info cache for port 6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1566.995822] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781521, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.426367} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.995822] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f384cb86-ee9a-480a-89e0-d09d86894f5f/f384cb86-ee9a-480a-89e0-d09d86894f5f.vmdk to [datastore1] 808491cc-b195-4e81-afa5-86bd6ed8cb25/808491cc-b195-4e81-afa5-86bd6ed8cb25.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1566.996622] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5936ab0b-797d-4003-b711-253c001dff4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.032784] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 808491cc-b195-4e81-afa5-86bd6ed8cb25/808491cc-b195-4e81-afa5-86bd6ed8cb25.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1567.038080] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.041073] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c663615d-0e27-43b7-a824-32e744cecd19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.062049] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.694s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.062049] env[62525]: DEBUG nova.objects.instance [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lazy-loading 'resources' on Instance uuid 1badb7d9-692a-445e-ad47-ebd6e19f8197 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1567.070329] env[62525]: DEBUG nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1567.088033] env[62525]: DEBUG oslo_vmware.api [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781522, 'name': ReconfigVM_Task, 'duration_secs': 1.182413} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.089133] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369753', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'name': 'volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '81fbb354-21f2-43f0-8aa3-e80e10235326', 'attached_at': '', 'detached_at': '', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'serial': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1567.092337] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1567.092337] env[62525]: value = "task-1781523" [ 1567.092337] env[62525]: _type = "Task" [ 1567.092337] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.109137] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781523, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.113329] env[62525]: INFO nova.scheduler.client.report [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Deleted allocations for instance bfb20735-1de9-4741-9d6f-5cd2ffedbca6 [ 1567.123437] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1567.123437] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1567.123571] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1567.124164] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1567.124164] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1567.124164] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1567.124322] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1567.124926] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1567.124926] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1567.124926] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1567.124926] env[62525]: DEBUG nova.virt.hardware [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1567.126385] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420a0f98-3d75-440d-9c55-db858534bdae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.139116] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b8df7d-f885-4c82-aba9-978962614bbe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.475646] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f64f39-6496-45bc-8c1b-990d648c79dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.484716] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c62a545-80aa-4f25-a7b0-1b2995289aa2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.522446] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6a710e-cb92-4f84-824a-80b098194b2d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.531950] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a34e17-7d0d-4fd1-be25-be0da4e7ff77 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.539081] env[62525]: DEBUG nova.network.neutron [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Updated VIF entry in instance network info cache for port 6d3644a6-dbaa-4a30-930a-53beadf8704a. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1567.540046] env[62525]: DEBUG nova.network.neutron [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Updating instance_info_cache with network_info: [{"id": "6d3644a6-dbaa-4a30-930a-53beadf8704a", "address": "fa:16:3e:fc:c0:64", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3644a6-db", "ovs_interfaceid": "6d3644a6-dbaa-4a30-930a-53beadf8704a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.552758] env[62525]: DEBUG nova.compute.provider_tree [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1567.606080] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781523, 'name': ReconfigVM_Task, 'duration_secs': 0.307947} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.606390] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 808491cc-b195-4e81-afa5-86bd6ed8cb25/808491cc-b195-4e81-afa5-86bd6ed8cb25.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1567.607066] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b9d5877-9dfe-43fd-a20e-feb01a80ffd4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.614604] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1567.614604] env[62525]: value = "task-1781524" [ 1567.614604] env[62525]: _type = "Task" [ 1567.614604] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.629200] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781524, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.635972] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f3f2030-833c-473b-89c0-0b9585eeef20 tempest-InstanceActionsTestJSON-2145601828 tempest-InstanceActionsTestJSON-2145601828-project-member] Lock "bfb20735-1de9-4741-9d6f-5cd2ffedbca6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.202s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.055463] env[62525]: DEBUG oslo_concurrency.lockutils [req-45c8109c-dec3-485d-9818-b99061557639 req-3281628a-d9ae-4334-8f52-710903adcb69 service nova] Releasing lock "refresh_cache-2f713b35-9d07-4d25-a333-506fd2469bd5" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.057012] env[62525]: DEBUG nova.scheduler.client.report [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1568.126191] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781524, 'name': Rename_Task, 'duration_secs': 0.152899} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.128582] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1568.128582] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78a29260-0d1a-4cc6-bc66-1f83837cd3d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.134782] env[62525]: DEBUG nova.objects.instance [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'flavor' on Instance uuid 81fbb354-21f2-43f0-8aa3-e80e10235326 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1568.137461] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1568.137461] env[62525]: value = "task-1781525" [ 1568.137461] env[62525]: _type = "Task" [ 1568.137461] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.151336] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781525, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.221219] env[62525]: DEBUG nova.compute.manager [req-8a3c1f75-634c-48ed-a85e-04f9c6803d7c req-76be3c16-b1ec-44ff-8425-2a0690155adc service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Received event network-vif-plugged-ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1568.221426] env[62525]: DEBUG oslo_concurrency.lockutils [req-8a3c1f75-634c-48ed-a85e-04f9c6803d7c req-76be3c16-b1ec-44ff-8425-2a0690155adc service nova] Acquiring lock "16667060-2172-4c1b-a3c8-340bb38846cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.221639] env[62525]: DEBUG oslo_concurrency.lockutils [req-8a3c1f75-634c-48ed-a85e-04f9c6803d7c req-76be3c16-b1ec-44ff-8425-2a0690155adc service nova] Lock "16667060-2172-4c1b-a3c8-340bb38846cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.221806] env[62525]: DEBUG oslo_concurrency.lockutils [req-8a3c1f75-634c-48ed-a85e-04f9c6803d7c req-76be3c16-b1ec-44ff-8425-2a0690155adc service nova] Lock "16667060-2172-4c1b-a3c8-340bb38846cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.222509] env[62525]: DEBUG nova.compute.manager [req-8a3c1f75-634c-48ed-a85e-04f9c6803d7c 
req-76be3c16-b1ec-44ff-8425-2a0690155adc service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] No waiting events found dispatching network-vif-plugged-ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1568.222685] env[62525]: WARNING nova.compute.manager [req-8a3c1f75-634c-48ed-a85e-04f9c6803d7c req-76be3c16-b1ec-44ff-8425-2a0690155adc service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Received unexpected event network-vif-plugged-ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e for instance with vm_state building and task_state spawning. [ 1568.349301] env[62525]: DEBUG nova.network.neutron [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Successfully updated port: ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1568.561591] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.501s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.568825] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.249s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.568825] env[62525]: DEBUG nova.objects.instance [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1568.591125] env[62525]: INFO nova.scheduler.client.report [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Deleted allocations for instance 1badb7d9-692a-445e-ad47-ebd6e19f8197 [ 1568.649232] env[62525]: DEBUG oslo_concurrency.lockutils [None req-669632c3-d000-4ce9-907d-b8bb4adc5495 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.399s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.659026] env[62525]: DEBUG oslo_vmware.api [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781525, 'name': PowerOnVM_Task, 'duration_secs': 0.48795} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.660038] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1568.660038] env[62525]: INFO nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Took 15.60 seconds to spawn the instance on the hypervisor. [ 1568.660038] env[62525]: DEBUG nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1568.660996] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6b4220-f263-4a91-be91-d60049aec563 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.814261] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "81fbb354-21f2-43f0-8aa3-e80e10235326" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.814554] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.814772] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "81fbb354-21f2-43f0-8aa3-e80e10235326-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.814961] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.815152] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.819125] env[62525]: INFO nova.compute.manager [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Terminating instance [ 1568.822252] env[62525]: DEBUG nova.compute.manager [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1568.822324] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1568.822774] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0386f9e6-644f-4bd4-a54b-73f2ad046a58 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.832725] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1568.832725] env[62525]: value = "task-1781526" [ 1568.832725] env[62525]: _type = "Task" [ 1568.832725] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.851519] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781526, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.854128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "refresh_cache-16667060-2172-4c1b-a3c8-340bb38846cf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.854128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquired lock "refresh_cache-16667060-2172-4c1b-a3c8-340bb38846cf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.854128] env[62525]: DEBUG nova.network.neutron [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.096559] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.097038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.103062] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5785cba4-2708-45df-a051-063eac6a2878 tempest-ServersNegativeTestMultiTenantJSON-1636855719 tempest-ServersNegativeTestMultiTenantJSON-1636855719-project-member] Lock "1badb7d9-692a-445e-ad47-ebd6e19f8197" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.766s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.186690] env[62525]: INFO nova.compute.manager [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Took 38.19 seconds to build instance. [ 1569.346170] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781526, 'name': PowerOffVM_Task, 'duration_secs': 0.219122} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.346740] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1569.347091] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1569.347445] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369753', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'name': 'volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '81fbb354-21f2-43f0-8aa3-e80e10235326', 'attached_at': '', 'detached_at': '', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'serial': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1569.348746] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f684be8e-69d4-4352-84f4-9b3e22ac7a31 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.384018] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d6294c-5473-4a57-8ca7-b8ca3144b673 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.393019] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d986f7-c27b-4922-8f2a-86f21fd8f607 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.420428] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e673d72-c0ad-4de2-a469-c3265b910348 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.439442] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] The volume has not been displaced from its original location: [datastore1] volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64/volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1569.445729] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1569.446730] env[62525]: DEBUG nova.network.neutron [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1569.448856] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c42d58bb-da8f-4e71-be18-9293aa847982 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.471524] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1569.471524] env[62525]: value = "task-1781527" [ 1569.471524] env[62525]: _type = "Task" [ 1569.471524] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.481724] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.586431] env[62525]: DEBUG oslo_concurrency.lockutils [None req-efeebd8d-de93-4e92-a553-916db216f002 tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.586431] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.969s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.586431] env[62525]: DEBUG nova.objects.instance [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lazy-loading 'resources' on Instance uuid f93669f2-c59d-4f3f-85a2-a60d714326ac {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1569.600508] env[62525]: DEBUG nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1569.660448] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "808491cc-b195-4e81-afa5-86bd6ed8cb25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.682194] env[62525]: DEBUG nova.network.neutron [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Updating instance_info_cache with network_info: [{"id": "ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e", "address": "fa:16:3e:b1:d2:cb", "network": {"id": "992a4e5d-d1ff-405c-80bd-6aea06cd4d46", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1976992135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0847efc6cc8b4a9894c34abfee0384ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff970c70-fe", "ovs_interfaceid": "ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.689303] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64ae5d8a-c42a-4926-b864-fc20f04e7ac6 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.700s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.689728] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.029s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.689958] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "808491cc-b195-4e81-afa5-86bd6ed8cb25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.690222] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.690347] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.692763] env[62525]: INFO nova.compute.manager [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Terminating instance [ 1569.694927] env[62525]: DEBUG nova.compute.manager [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1569.695193] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1569.696150] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d8f9c8-e629-4eb4-a004-dcf2dd080c82 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.704712] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1569.704972] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bc7cc8d-f5ae-4efa-a7cd-eb8665a66303 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.712894] env[62525]: DEBUG oslo_vmware.api [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1569.712894] env[62525]: value = "task-1781528" [ 1569.712894] env[62525]: _type = "Task" [ 1569.712894] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.722949] env[62525]: DEBUG oslo_vmware.api [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781528, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.985153] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781527, 'name': ReconfigVM_Task, 'duration_secs': 0.24067} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.985614] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1569.990698] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2eab87f8-f2df-49a2-b4d4-50429cff39f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.014114] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1570.014114] env[62525]: value = "task-1781529" [ 1570.014114] env[62525]: _type = "Task" [ 1570.014114] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.028138] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781529, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.121506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.187211] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Releasing lock "refresh_cache-16667060-2172-4c1b-a3c8-340bb38846cf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.187211] env[62525]: DEBUG nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Instance network_info: |[{"id": "ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e", "address": "fa:16:3e:b1:d2:cb", "network": {"id": "992a4e5d-d1ff-405c-80bd-6aea06cd4d46", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1976992135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0847efc6cc8b4a9894c34abfee0384ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff970c70-fe", "ovs_interfaceid": "ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1570.187211] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:d2:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1570.195832] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Creating folder: Project (0847efc6cc8b4a9894c34abfee0384ad). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1570.197382] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-689af8eb-0362-4f9d-be06-f09e815b17a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.212856] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Created folder: Project (0847efc6cc8b4a9894c34abfee0384ad) in parent group-v369553. [ 1570.212856] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Creating folder: Instances. Parent ref: group-v369754. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1570.213119] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7969a22a-e58a-47d6-916f-eb4bae767b3f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.224814] env[62525]: DEBUG oslo_vmware.api [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781528, 'name': PowerOffVM_Task, 'duration_secs': 0.202075} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.227643] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1570.227833] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1570.229584] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33f18adc-17fa-4d33-9bf7-fa9e8b6ce04b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.234026] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Created folder: Instances in parent group-v369754. [ 1570.234026] env[62525]: DEBUG oslo.service.loopingcall [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1570.234026] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1570.234026] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5cebe160-81d2-41c0-bfe0-8d8714e09843 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.256173] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1570.256173] env[62525]: value = "task-1781533" [ 1570.256173] env[62525]: _type = "Task" [ 1570.256173] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.267784] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781533, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.312859] env[62525]: DEBUG nova.compute.manager [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Received event network-changed-ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1570.313077] env[62525]: DEBUG nova.compute.manager [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Refreshing instance network info cache due to event network-changed-ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1570.313331] env[62525]: DEBUG oslo_concurrency.lockutils [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] Acquiring lock "refresh_cache-16667060-2172-4c1b-a3c8-340bb38846cf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.313476] env[62525]: DEBUG oslo_concurrency.lockutils [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] Acquired lock "refresh_cache-16667060-2172-4c1b-a3c8-340bb38846cf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.313639] env[62525]: DEBUG nova.network.neutron [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Refreshing network info cache for port ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1570.330763] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1570.331028] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1570.331248] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleting the datastore file [datastore1] 808491cc-b195-4e81-afa5-86bd6ed8cb25 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1570.332377] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57c2fdb8-7233-485c-a33a-f89eaf845b50 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.343082] env[62525]: DEBUG oslo_vmware.api [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1570.343082] env[62525]: value = "task-1781534" [ 1570.343082] env[62525]: _type = "Task" [ 1570.343082] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.354269] env[62525]: DEBUG oslo_vmware.api [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781534, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.509492] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b551f90b-253e-4dd7-8229-2f9b0375b826 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.521740] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312b6867-bae4-4486-b7e8-e96d8f800e27 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.529649] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781529, 'name': ReconfigVM_Task, 'duration_secs': 0.300705} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.557072] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369753', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'name': 'volume-42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '81fbb354-21f2-43f0-8aa3-e80e10235326', 'attached_at': '', 'detached_at': '', 'volume_id': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64', 'serial': '42c6b2b4-d3a1-429c-bad7-7c6c77797b64'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1570.557072] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1570.558142] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9ced78-95b3-4a29-8122-71314e09fa89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.562790] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741edb44-5f0e-44eb-a1e0-19e80e472997 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.571571] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1570.574509] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25fb8586-e25d-4b85-9302-abc0dbef4150 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.577341] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4e21a3c0-6f99-4a19-8d4d-0e3a386d8991 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.601151] env[62525]: DEBUG nova.compute.provider_tree [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.669616] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1570.669844] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1570.670109] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleting the datastore file [datastore1] 81fbb354-21f2-43f0-8aa3-e80e10235326 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1570.670554] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0398255e-6177-48b4-9231-25909f352148 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.682020] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1570.682020] env[62525]: value = "task-1781536" [ 1570.682020] env[62525]: _type = "Task" [ 1570.682020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.692513] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.772606] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781533, 'name': CreateVM_Task, 'duration_secs': 0.48813} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.772789] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1570.773742] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.773915] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.774544] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1570.774633] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66ae4d0e-2775-490d-ad06-bb3ee4ddca0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.782137] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1570.782137] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526ba13a-e5ad-4fad-7f4e-4a4f32f7ab34" [ 1570.782137] env[62525]: _type = "Task" [ 1570.782137] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.791785] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526ba13a-e5ad-4fad-7f4e-4a4f32f7ab34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.861678] env[62525]: DEBUG oslo_vmware.api [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164512} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.861817] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1570.862044] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1570.862298] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1570.862428] env[62525]: INFO nova.compute.manager [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1570.862865] env[62525]: DEBUG oslo.service.loopingcall [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1570.862865] env[62525]: DEBUG nova.compute.manager [-] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1570.862974] env[62525]: DEBUG nova.network.neutron [-] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1571.107314] env[62525]: DEBUG nova.scheduler.client.report [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1571.194433] env[62525]: DEBUG oslo_vmware.api [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161273} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.194757] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1571.195044] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1571.195223] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1571.195402] env[62525]: INFO nova.compute.manager [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Took 2.37 seconds to destroy the instance on the hypervisor. [ 1571.195647] env[62525]: DEBUG oslo.service.loopingcall [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.196026] env[62525]: DEBUG nova.compute.manager [-] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1571.196026] env[62525]: DEBUG nova.network.neutron [-] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1571.293987] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526ba13a-e5ad-4fad-7f4e-4a4f32f7ab34, 'name': SearchDatastore_Task, 'duration_secs': 0.009946} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.297963] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.297963] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1571.298144] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.298364] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.298560] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.298900] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34e28e61-8409-4f28-815e-cdf5af2e9372 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.308391] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.308581] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1571.309405] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42a1bfeb-8ec2-4d70-a75d-fdc32b138707 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.317894] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1571.317894] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523f0485-e86b-3b70-9133-b240ddd2eb8b" [ 1571.317894] env[62525]: _type = "Task" [ 1571.317894] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.329622] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523f0485-e86b-3b70-9133-b240ddd2eb8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.334422] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.334655] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.370991] env[62525]: DEBUG nova.network.neutron [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Updated VIF entry in instance network info cache for port ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1571.371386] env[62525]: DEBUG nova.network.neutron [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Updating instance_info_cache with network_info: [{"id": "ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e", "address": "fa:16:3e:b1:d2:cb", "network": {"id": "992a4e5d-d1ff-405c-80bd-6aea06cd4d46", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1976992135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0847efc6cc8b4a9894c34abfee0384ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff970c70-fe", "ovs_interfaceid": "ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.576327] env[62525]: DEBUG nova.compute.manager [req-0cac15e9-3430-43f1-826e-e56e10c94753 req-1f85ca9b-1f20-463f-b6b2-26b1e337a736 service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Received event network-vif-deleted-6b0336ac-59dc-4910-adf2-48b4b073fdb6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1571.576327] env[62525]: INFO nova.compute.manager [req-0cac15e9-3430-43f1-826e-e56e10c94753 req-1f85ca9b-1f20-463f-b6b2-26b1e337a736 service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Neutron deleted interface 6b0336ac-59dc-4910-adf2-48b4b073fdb6; detaching it from the instance and deleting it from the info cache [ 1571.576327] env[62525]: DEBUG nova.network.neutron [req-0cac15e9-3430-43f1-826e-e56e10c94753 req-1f85ca9b-1f20-463f-b6b2-26b1e337a736 service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.618625] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.032s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.621017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.050s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.621286] env[62525]: DEBUG nova.objects.instance 
[None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lazy-loading 'resources' on Instance uuid a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1571.642581] env[62525]: INFO nova.scheduler.client.report [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted allocations for instance f93669f2-c59d-4f3f-85a2-a60d714326ac [ 1571.834034] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523f0485-e86b-3b70-9133-b240ddd2eb8b, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.834918] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb5ba259-f2d8-4830-8e7c-1449176b9995 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.838189] env[62525]: DEBUG nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1571.844787] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1571.844787] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b7f9a5-50b8-b3de-d3fd-5f7b37125b82" [ 1571.844787] env[62525]: _type = "Task" [ 1571.844787] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.853621] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b7f9a5-50b8-b3de-d3fd-5f7b37125b82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.874861] env[62525]: DEBUG oslo_concurrency.lockutils [req-6dbaaac3-997f-422d-8d3e-1cadd83da30a req-5fb0d68e-08bd-477c-8534-67060feff97f service nova] Releasing lock "refresh_cache-16667060-2172-4c1b-a3c8-340bb38846cf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.927154] env[62525]: DEBUG nova.network.neutron [-] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.053272] env[62525]: DEBUG nova.network.neutron [-] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.078556] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05c6a022-0d9e-4dce-8ad9-cf8c52e563af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.090409] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403e5ee3-8243-433d-86b8-27adfff19f7f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.129241] env[62525]: DEBUG nova.compute.manager [req-0cac15e9-3430-43f1-826e-e56e10c94753 req-1f85ca9b-1f20-463f-b6b2-26b1e337a736 service nova] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Detach interface failed, port_id=6b0336ac-59dc-4910-adf2-48b4b073fdb6, reason: Instance 81fbb354-21f2-43f0-8aa3-e80e10235326 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1572.154908] env[62525]: DEBUG oslo_concurrency.lockutils [None req-692f11f3-e44a-4c21-9d6b-211d3ab91810 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "f93669f2-c59d-4f3f-85a2-a60d714326ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.461s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.345991] env[62525]: DEBUG nova.compute.manager [req-300d544b-33e5-4796-b33e-5cfa5abab1eb req-22d42bfb-6317-4d45-8088-a248749ab786 service nova] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Received event network-vif-deleted-f2cd5102-dd15-44ea-9596-d4251d37814c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1572.361473] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b7f9a5-50b8-b3de-d3fd-5f7b37125b82, 'name': SearchDatastore_Task, 'duration_secs': 0.011277} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.362502] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.362853] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.363253] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 16667060-2172-4c1b-a3c8-340bb38846cf/16667060-2172-4c1b-a3c8-340bb38846cf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1572.365805] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdbad259-0716-4358-bfd3-64e3e10398dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.374528] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1572.374528] env[62525]: value = "task-1781537" [ 1572.374528] env[62525]: _type = "Task" [ 1572.374528] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.388185] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.429643] env[62525]: INFO nova.compute.manager [-] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Took 1.57 seconds to deallocate network for instance. 
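[editor's note] The lockutils entries above ("Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim"", then the acquired/released records with waited/held timings) come from oslo.concurrency's named-lock helpers. A minimal sketch of that pattern follows; it is not Nova's actual code, and only the lock name is taken from the log.

# Minimal sketch of the oslo.concurrency named-lock pattern that produces the
# "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" DEBUG
# lines above. Only the lock name comes from the log; the body is illustrative.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim(instance):
    # Runs with the in-process "compute_resources" lock held; the lockutils
    # wrapper logs the acquire/wait/held timings at DEBUG on entry and exit.
    return instance

# The same lock can also be taken inline:
#     with lockutils.lock("compute_resources"):
#         ...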
[ 1572.435843] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26349be8-be2b-4dc7-842c-3f27208390b5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.444236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faa65e1-c0ac-4adc-a5d1-38ce859a1aac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.478410] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0a157c-84e8-49aa-8385-b4ed96b31481 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.488328] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7850c85-cb84-41da-a61a-fa2ec03b6f8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.505477] env[62525]: DEBUG nova.compute.provider_tree [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1572.556116] env[62525]: INFO nova.compute.manager [-] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Took 1.36 seconds to deallocate network for instance. [ 1572.749946] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.750746] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.750746] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.751023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.751023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 
tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.753686] env[62525]: INFO nova.compute.manager [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Terminating instance [ 1572.756102] env[62525]: DEBUG nova.compute.manager [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1572.756336] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1572.760030] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f351f1e6-545a-4e28-b462-1766e3573d92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.768921] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1572.769269] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8ce1584-23c0-4ce5-9110-573c26bf7f58 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.778017] env[62525]: DEBUG oslo_vmware.api [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1572.778017] env[62525]: value = "task-1781538" [ 1572.778017] env[62525]: _type = "Task" [ 1572.778017] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.789763] env[62525]: DEBUG oslo_vmware.api [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781538, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.887808] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781537, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.940305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.011232] env[62525]: DEBUG nova.scheduler.client.report [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1573.110578] env[62525]: INFO nova.compute.manager [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Took 0.55 seconds to detach 1 volumes for instance. [ 1573.290608] env[62525]: DEBUG oslo_vmware.api [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781538, 'name': PowerOffVM_Task, 'duration_secs': 0.385904} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.291035] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1573.291230] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1573.291502] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e01f512-3974-4efb-8c00-63b49ffa8693 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.389778] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610327} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.390242] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 16667060-2172-4c1b-a3c8-340bb38846cf/16667060-2172-4c1b-a3c8-340bb38846cf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1573.390483] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1573.390740] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73b68805-df44-4603-be39-b7fc9ba12081 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.394383] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1573.394579] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1573.394781] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleting the datastore file [datastore1] 56cb0d0c-a7dd-4158-8bed-ddff050e0226 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1573.395428] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a554a9d-28c4-47ad-9278-02716b342f26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.401602] env[62525]: DEBUG oslo_vmware.api [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for the task: (returnval){ [ 1573.401602] env[62525]: value = "task-1781541" [ 1573.401602] env[62525]: _type = "Task" [ 1573.401602] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.402684] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1573.402684] env[62525]: value = "task-1781540" [ 1573.402684] env[62525]: _type = "Task" [ 1573.402684] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.415875] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781540, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.419871] env[62525]: DEBUG oslo_vmware.api [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781541, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.515038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.517381] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.151s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.517618] env[62525]: DEBUG nova.objects.instance [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lazy-loading 'resources' on Instance uuid 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1573.551181] env[62525]: INFO nova.scheduler.client.report [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Deleted allocations for instance a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca [ 1573.621050] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.916265] env[62525]: DEBUG oslo_vmware.api [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Task: {'id': task-1781541, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145748} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.919264] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1573.919923] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1573.919923] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1573.919923] env[62525]: INFO nova.compute.manager [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1573.920131] env[62525]: DEBUG oslo.service.loopingcall [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1573.920363] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781540, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069755} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.920560] env[62525]: DEBUG nova.compute.manager [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1573.920653] env[62525]: DEBUG nova.network.neutron [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1573.922582] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1573.923398] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822db45d-3dad-4150-821d-bf7b92981801 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.947103] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 16667060-2172-4c1b-a3c8-340bb38846cf/16667060-2172-4c1b-a3c8-340bb38846cf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1573.947427] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0be894f1-3f8e-430b-9a92-ecef1b94ea98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.971205] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1573.971205] env[62525]: value = "task-1781542" [ 1573.971205] env[62525]: _type = "Task" [ 1573.971205] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.982151] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781542, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.059193] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dd1eaea5-6a36-48f8-9344-d820a925f80c tempest-ServerShowV257Test-1435304210 tempest-ServerShowV257Test-1435304210-project-member] Lock "a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.236s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.201648] env[62525]: DEBUG nova.compute.manager [req-af26bcda-4361-4c4b-8b21-47b47e09514e req-01fb06d8-90f4-4e4d-84af-728e04854146 service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Received event network-vif-deleted-44ff1acd-1593-43a1-95fd-aceba913d7d5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1574.201876] env[62525]: INFO nova.compute.manager [req-af26bcda-4361-4c4b-8b21-47b47e09514e req-01fb06d8-90f4-4e4d-84af-728e04854146 service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Neutron deleted interface 44ff1acd-1593-43a1-95fd-aceba913d7d5; detaching it from the instance and deleting it from the info cache [ 1574.202086] env[62525]: DEBUG nova.network.neutron [req-af26bcda-4361-4c4b-8b21-47b47e09514e req-01fb06d8-90f4-4e4d-84af-728e04854146 service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.333630] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01db6116-402f-4ab8-b3c7-da57f2cf620a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.342774] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43cf1c6-cabc-4e73-aa50-46d4c6852a42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.375211] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39350ac3-c09d-40d4-9444-4e43a7dc4cbd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.383677] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6dae5a-491a-455d-8d1f-345aa9038d20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.398181] env[62525]: DEBUG nova.compute.provider_tree [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.481872] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781542, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.672429] env[62525]: DEBUG nova.network.neutron [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.704972] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80f1fc02-881e-46b3-ae4d-2b227fb4020b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.716199] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1575fc81-8bbb-4459-9e47-ca7adce9d0e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.752176] env[62525]: DEBUG nova.compute.manager [req-af26bcda-4361-4c4b-8b21-47b47e09514e req-01fb06d8-90f4-4e4d-84af-728e04854146 service nova] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Detach interface failed, port_id=44ff1acd-1593-43a1-95fd-aceba913d7d5, reason: Instance 56cb0d0c-a7dd-4158-8bed-ddff050e0226 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1574.901577] env[62525]: DEBUG nova.scheduler.client.report [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.983049] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781542, 'name': ReconfigVM_Task, 'duration_secs': 0.57451} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.983363] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 16667060-2172-4c1b-a3c8-340bb38846cf/16667060-2172-4c1b-a3c8-340bb38846cf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1574.984243] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95b5cccf-b4c3-41fc-aa8c-54bca05816c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.992908] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1574.992908] env[62525]: value = "task-1781543" [ 1574.992908] env[62525]: _type = "Task" [ 1574.992908] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.001635] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781543, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.175911] env[62525]: INFO nova.compute.manager [-] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Took 1.25 seconds to deallocate network for instance. 
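[editor's note] The "Invoking VirtualMachine.PowerOnVM_Task" / "Waiting for the task" / "Task ... completed successfully" sequence above is oslo.vmware's invoke-then-poll pattern. Below is a hedged sketch of driving it through the public oslo.vmware API; the vCenter address, credentials and the "vm-123" managed-object ID are placeholders, and Nova itself goes through its own session wrappers rather than this direct form.

# Hedged sketch of the oslo.vmware invoke-then-poll pattern behind the
# PowerOnVM_Task lines above. All connection details and the moid are
# placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    "vc.example.test", "user", "secret",   # host, username, password (placeholders)
    10,                                    # api_retry_count
    0.5)                                   # task_poll_interval in seconds

vm_ref = vim_util.get_moref("vm-123", "VirtualMachine")   # placeholder moid
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
session.wait_for_task(task)   # polls TaskInfo until success, raises on error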
[ 1575.406865] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.889s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.410030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.936s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.410030] env[62525]: DEBUG nova.objects.instance [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lazy-loading 'resources' on Instance uuid 024c7393-de18-4c76-a27e-757710824494 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1575.430259] env[62525]: INFO nova.scheduler.client.report [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Deleted allocations for instance 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9 [ 1575.507423] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781543, 'name': Rename_Task, 'duration_secs': 0.155812} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.508138] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1575.508486] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbca657d-52ce-4152-86c1-b842070d4b01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.516479] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1575.516479] env[62525]: value = "task-1781544" [ 1575.516479] env[62525]: _type = "Task" [ 1575.516479] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.527497] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781544, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.683157] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.941922] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9a872539-e9d2-4e18-b375-3df68de7d225 tempest-ListImageFiltersTestJSON-1647952237 tempest-ListImageFiltersTestJSON-1647952237-project-member] Lock "5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.663s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.026463] env[62525]: DEBUG oslo_vmware.api [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781544, 'name': PowerOnVM_Task, 'duration_secs': 0.448013} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.028944] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1576.029649] env[62525]: INFO nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Took 8.96 seconds to spawn the instance on the hypervisor. 
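[editor's note] The inventory payload repeated in the report-client lines (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) determines how much Placement will let the scheduler allocate on provider bb89c0ac-8f56-43c6-9f73-fd897be63424. A small worked example of the standard Placement capacity arithmetic, capacity = (total - reserved) * allocation_ratio, using the figures from the log:

# Placement capacity arithmetic applied to the inventory data logged above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400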
[ 1576.029649] env[62525]: DEBUG nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1576.033628] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff80bb6-da25-4e7b-81c6-26755b27a75c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.200994] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb92adb-4dc7-42c6-b88c-801444c7747f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.210670] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898b9d01-7b3a-4c63-8edd-1a4eea343b81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.246236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6dd22d-a3c7-4a3b-b9fb-6ee6a3a2cd6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.255192] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ad87d2-e9d4-4fb1-855c-19419a61d670 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.270621] env[62525]: DEBUG nova.compute.provider_tree [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.555861] env[62525]: INFO nova.compute.manager [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Took 23.25 seconds to build instance. 
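[editor's note] The "Checking state" entry followed by a burst of PropertyCollector.RetrievePropertiesEx invocations is the driver reading the VM's runtime.powerState back from vCenter. A hedged sketch of that read through oslo.vmware's property helper follows; the session arguments and the "vm-123" moid are placeholders, as in the earlier PowerOnVM_Task sketch.

# Hedged sketch of reading a VM's power state; a call like this is what
# generates PropertyCollector.RetrievePropertiesEx round-trips as seen above.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession("vc.example.test", "user", "secret", 10, 0.5)  # placeholders
vm_ref = vim_util.get_moref("vm-123", "VirtualMachine")                       # placeholder moid
power_state = session.invoke_api(
    vim_util, "get_object_property", session.vim, vm_ref, "runtime.powerState")
print(power_state)   # e.g. "poweredOn" once PowerOnVM_Task has completed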
[ 1576.774331] env[62525]: DEBUG nova.scheduler.client.report [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1577.061193] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f126b3-554f-4d27-b04a-c58aa7bb26ee tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "16667060-2172-4c1b-a3c8-340bb38846cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.770s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.279368] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.281708] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.160s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.283790] env[62525]: INFO nova.compute.claims [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1577.309317] env[62525]: INFO nova.scheduler.client.report [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Deleted allocations for instance 024c7393-de18-4c76-a27e-757710824494 [ 1577.816057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-291976d3-627a-43ea-9931-8f27401e09be tempest-ImagesOneServerNegativeTestJSON-1137160648 tempest-ImagesOneServerNegativeTestJSON-1137160648-project-member] Lock "024c7393-de18-4c76-a27e-757710824494" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.131s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.822664] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "16667060-2172-4c1b-a3c8-340bb38846cf" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.822883] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "16667060-2172-4c1b-a3c8-340bb38846cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.823093] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "16667060-2172-4c1b-a3c8-340bb38846cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.823326] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "16667060-2172-4c1b-a3c8-340bb38846cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.823468] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "16667060-2172-4c1b-a3c8-340bb38846cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.825485] env[62525]: INFO nova.compute.manager [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Terminating instance [ 1577.827564] env[62525]: DEBUG nova.compute.manager [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1577.827808] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1577.828728] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e1a25e-07d4-47a7-9279-7848caf90958 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.839321] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1577.839594] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c084153a-3f66-4022-b71e-d7196f435819 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.848192] env[62525]: DEBUG oslo_vmware.api [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1577.848192] env[62525]: value = "task-1781545" [ 1577.848192] env[62525]: _type = "Task" [ 1577.848192] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.865191] env[62525]: DEBUG oslo_vmware.api [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781545, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.366275] env[62525]: DEBUG oslo_vmware.api [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781545, 'name': PowerOffVM_Task, 'duration_secs': 0.213144} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.366275] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1578.366275] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1578.366680] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df200052-8cca-4ef1-8974-f89f4d9c9e52 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.576732] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c7550e-aa50-4403-a675-440ef50aae65 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.584743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.585183] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.589724] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17413a9b-50d4-49a2-8150-8cefc2b223dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.626012] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd86bd8-da77-49fb-9bae-758271b9e22a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.633812] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c43864-a7b9-4c05-b21d-db2c8f0ffea7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.648717] env[62525]: DEBUG nova.compute.provider_tree [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.090436] env[62525]: DEBUG nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1579.152902] env[62525]: DEBUG nova.scheduler.client.report [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1579.230098] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1579.230332] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1579.230519] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Deleting the datastore file [datastore1] 16667060-2172-4c1b-a3c8-340bb38846cf {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1579.230804] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c43c5a74-3cc3-4f20-9885-306e4ecb80d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.239112] env[62525]: DEBUG oslo_vmware.api [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for the task: (returnval){ [ 1579.239112] env[62525]: value = "task-1781547" [ 1579.239112] env[62525]: _type = "Task" [ 1579.239112] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.249299] env[62525]: DEBUG oslo_vmware.api [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781547, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.613320] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.662171] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.662171] env[62525]: DEBUG nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1579.665040] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.302s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.666441] env[62525]: INFO nova.compute.claims [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1579.750516] env[62525]: DEBUG oslo_vmware.api [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Task: {'id': task-1781547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156541} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.751019] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1579.751019] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1579.751322] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1579.751433] env[62525]: INFO nova.compute.manager [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Took 1.92 seconds to destroy the instance on the hypervisor. [ 1579.751718] env[62525]: DEBUG oslo.service.loopingcall [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1579.751919] env[62525]: DEBUG nova.compute.manager [-] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1579.752023] env[62525]: DEBUG nova.network.neutron [-] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1580.091314] env[62525]: DEBUG nova.compute.manager [req-67093c61-3cef-45cd-ba06-6f9391e7a03c req-f9e48fe7-9870-4981-afd0-0502277f55b1 service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Received event network-vif-deleted-ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1580.091543] env[62525]: INFO nova.compute.manager [req-67093c61-3cef-45cd-ba06-6f9391e7a03c req-f9e48fe7-9870-4981-afd0-0502277f55b1 service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Neutron deleted interface ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e; detaching it from the instance and deleting it from the info cache [ 1580.091715] env[62525]: DEBUG nova.network.neutron [req-67093c61-3cef-45cd-ba06-6f9391e7a03c req-f9e48fe7-9870-4981-afd0-0502277f55b1 service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.174385] env[62525]: DEBUG nova.compute.utils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1580.175734] env[62525]: DEBUG nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1580.175897] env[62525]: DEBUG nova.network.neutron [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1580.214805] env[62525]: DEBUG nova.policy [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7f91d593ab442049db42852ea6edd23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af5258cd7a314fc784be2d2e33e6eceb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1580.501995] env[62525]: DEBUG nova.network.neutron [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Successfully created port: dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1580.504338] env[62525]: DEBUG nova.network.neutron [-] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.595232] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33d31fd7-4275-4e73-8d4b-0a5e64caba0c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.606314] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d3c89b-4daa-4b16-80e2-7549cd1ce667 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.641648] env[62525]: DEBUG nova.compute.manager [req-67093c61-3cef-45cd-ba06-6f9391e7a03c req-f9e48fe7-9870-4981-afd0-0502277f55b1 service nova] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Detach interface failed, port_id=ff970c70-fe35-41f6-8b4a-9c7a1bb24a1e, reason: Instance 16667060-2172-4c1b-a3c8-340bb38846cf could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1580.681623] env[62525]: DEBUG nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1580.951900] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ffb77d-cc42-4e40-a3bb-ddb582cf7e3a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.961889] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008c7fa0-b511-410f-8ffb-3e6007f2b61d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.992676] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12588846-147c-4443-96af-2db2cf80cb8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.000777] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2692feff-5c41-48a4-940d-94e154bdf979 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.007399] env[62525]: INFO nova.compute.manager [-] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Took 1.26 seconds to deallocate network for instance. [ 1581.016438] env[62525]: DEBUG nova.compute.provider_tree [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1581.521804] env[62525]: DEBUG nova.scheduler.client.report [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1581.525865] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.689411] env[62525]: DEBUG nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1581.716423] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1581.716727] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1581.716884] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1581.717077] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1581.717276] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1581.717434] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1581.717641] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1581.717797] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1581.717962] env[62525]: DEBUG 
nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1581.718141] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1581.718314] env[62525]: DEBUG nova.virt.hardware [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1581.719221] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f9c2c5-a7fd-4838-a1ed-4158187637fb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.729948] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cddd5f-2791-4848-bf8a-6aef82144645 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.027585] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.028134] env[62525]: DEBUG nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1582.030861] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.091s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.031085] env[62525]: DEBUG nova.objects.instance [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lazy-loading 'resources' on Instance uuid 808491cc-b195-4e81-afa5-86bd6ed8cb25 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1582.062505] env[62525]: DEBUG nova.network.neutron [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Successfully updated port: dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1582.115952] env[62525]: DEBUG nova.compute.manager [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received event network-vif-plugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1582.116166] env[62525]: DEBUG oslo_concurrency.lockutils [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.116381] env[62525]: DEBUG oslo_concurrency.lockutils [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.116546] env[62525]: DEBUG oslo_concurrency.lockutils [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.116709] env[62525]: DEBUG nova.compute.manager [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] No waiting events found dispatching network-vif-plugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1582.116875] env[62525]: WARNING nova.compute.manager [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received unexpected event network-vif-plugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 for instance with vm_state building and task_state spawning. 
[ 1582.117040] env[62525]: DEBUG nova.compute.manager [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received event network-changed-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1582.117195] env[62525]: DEBUG nova.compute.manager [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Refreshing instance network info cache due to event network-changed-dc247c87-0d2d-47bf-9d66-5e81d9237fa6. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1582.117385] env[62525]: DEBUG oslo_concurrency.lockutils [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] Acquiring lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.117522] env[62525]: DEBUG oslo_concurrency.lockutils [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] Acquired lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.117672] env[62525]: DEBUG nova.network.neutron [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Refreshing network info cache for port dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1582.534390] env[62525]: DEBUG nova.compute.utils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1582.535836] env[62525]: DEBUG nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1582.536018] env[62525]: DEBUG nova.network.neutron [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1582.565560] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.578373] env[62525]: DEBUG nova.policy [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '391b41cf09fd42879d3f5cd3153c2045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a46df22dac6f473b8395f9302c3a4a75', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1582.650405] env[62525]: DEBUG nova.network.neutron [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1582.723035] env[62525]: DEBUG nova.network.neutron [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.777283] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968ca915-2b1e-4ce6-b6ee-3934a93db68c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.784073] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7237ea63-917f-41ed-9e90-07f95c0e6d60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.815753] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afb106d-0aa8-407a-ac0a-b7d11c1222f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.823695] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3b7219-0866-4e05-ac35-188b54af85b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.841433] env[62525]: DEBUG nova.compute.provider_tree [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1582.892684] env[62525]: DEBUG nova.network.neutron [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Successfully created port: fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1583.040833] env[62525]: DEBUG nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1583.226224] env[62525]: DEBUG oslo_concurrency.lockutils [req-7dbf3cf3-8e35-42b9-81d8-1641cf35547b req-3a87b899-1207-4c42-a740-bfa7adb51632 service nova] Releasing lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.226566] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.226730] env[62525]: DEBUG nova.network.neutron [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1583.345421] env[62525]: DEBUG nova.scheduler.client.report [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1583.757605] env[62525]: DEBUG nova.network.neutron [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1583.850171] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.854980] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.235s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.855245] env[62525]: DEBUG nova.objects.instance [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'resources' on Instance uuid 81fbb354-21f2-43f0-8aa3-e80e10235326 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1583.871848] env[62525]: INFO nova.scheduler.client.report [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted allocations for instance 808491cc-b195-4e81-afa5-86bd6ed8cb25 [ 1583.892939] env[62525]: DEBUG nova.network.neutron [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [{"id": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "address": "fa:16:3e:94:5d:a4", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc247c87-0d", "ovs_interfaceid": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.051123] env[62525]: DEBUG nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1584.081043] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1584.081300] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1584.081458] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1584.081640] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1584.081784] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1584.081939] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1584.085958] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1584.086191] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1584.086379] 
env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1584.086553] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1584.086728] env[62525]: DEBUG nova.virt.hardware [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1584.087630] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42cbbd6-3b8a-4524-b7e6-2ac710281b82 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.098076] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5895c536-ae9a-43a9-af12-91bff34f7772 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.326360] env[62525]: DEBUG nova.compute.manager [req-70917de0-1c92-4034-bc55-316ae8867cb2 req-b2960dd3-3eb2-4273-a286-3f999f048d17 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Received event network-vif-plugged-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1584.326597] env[62525]: DEBUG oslo_concurrency.lockutils [req-70917de0-1c92-4034-bc55-316ae8867cb2 req-b2960dd3-3eb2-4273-a286-3f999f048d17 service nova] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.326804] env[62525]: DEBUG oslo_concurrency.lockutils [req-70917de0-1c92-4034-bc55-316ae8867cb2 req-b2960dd3-3eb2-4273-a286-3f999f048d17 service nova] Lock "82443424-6071-44b3-bd9a-f92a1a650f27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.326972] env[62525]: DEBUG oslo_concurrency.lockutils [req-70917de0-1c92-4034-bc55-316ae8867cb2 req-b2960dd3-3eb2-4273-a286-3f999f048d17 service nova] Lock "82443424-6071-44b3-bd9a-f92a1a650f27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.327256] env[62525]: DEBUG nova.compute.manager [req-70917de0-1c92-4034-bc55-316ae8867cb2 req-b2960dd3-3eb2-4273-a286-3f999f048d17 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] No waiting events found dispatching network-vif-plugged-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1584.327380] env[62525]: WARNING 
nova.compute.manager [req-70917de0-1c92-4034-bc55-316ae8867cb2 req-b2960dd3-3eb2-4273-a286-3f999f048d17 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Received unexpected event network-vif-plugged-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 for instance with vm_state building and task_state spawning. [ 1584.384014] env[62525]: DEBUG oslo_concurrency.lockutils [None req-765b4c8f-eafc-4915-9106-aee568819fbe tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "808491cc-b195-4e81-afa5-86bd6ed8cb25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.694s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.396509] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.396868] env[62525]: DEBUG nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance network_info: |[{"id": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "address": "fa:16:3e:94:5d:a4", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc247c87-0d", "ovs_interfaceid": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1584.398251] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:5d:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc247c87-0d2d-47bf-9d66-5e81d9237fa6', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1584.405909] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 
tempest-ServersNegativeTestJSON-1936644249-project-member] Creating folder: Project (af5258cd7a314fc784be2d2e33e6eceb). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1584.406208] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5c1e319-799a-441d-bdfe-aef0d5b01c28 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.419561] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Created folder: Project (af5258cd7a314fc784be2d2e33e6eceb) in parent group-v369553. [ 1584.419777] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating folder: Instances. Parent ref: group-v369757. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1584.420296] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c5b2e95-86e5-4478-aedc-987ffcf58681 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.438451] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Created folder: Instances in parent group-v369757. [ 1584.438572] env[62525]: DEBUG oslo.service.loopingcall [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1584.439520] env[62525]: DEBUG nova.network.neutron [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Successfully updated port: fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1584.441025] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1584.441025] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e3e11c6-ad65-47cf-aa55-9adfe85663be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.464961] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1584.464961] env[62525]: value = "task-1781550" [ 1584.464961] env[62525]: _type = "Task" [ 1584.464961] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.475475] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781550, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.610038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f96bac-af5c-4a07-80bd-888e2288b40b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.618287] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d1879a-a077-438a-8dc7-056f8c79f98c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.649921] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99ffbb6-1bf6-47ed-9336-00cd05c47a0b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.658044] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8590c334-035c-4aff-bef8-c74144af6fb8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.671818] env[62525]: DEBUG nova.compute.provider_tree [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1584.957600] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.957965] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.958161] env[62525]: DEBUG nova.network.neutron [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1584.977182] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781550, 'name': CreateVM_Task, 'duration_secs': 0.326703} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.977352] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1584.978031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.978213] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.978507] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1584.978752] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2bd52b9-b3f4-4824-a261-e10f1ab8e857 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.983711] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1584.983711] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52aa37bc-1d2b-935e-43e6-38b47a312b9c" [ 1584.983711] env[62525]: _type = "Task" [ 1584.983711] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.991680] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52aa37bc-1d2b-935e-43e6-38b47a312b9c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.175772] env[62525]: DEBUG nova.scheduler.client.report [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1585.243714] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "cafae62e-b001-4ee0-8e89-4da9c60cf488" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.244582] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "cafae62e-b001-4ee0-8e89-4da9c60cf488" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.244582] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "cafae62e-b001-4ee0-8e89-4da9c60cf488-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.244582] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "cafae62e-b001-4ee0-8e89-4da9c60cf488-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.244582] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "cafae62e-b001-4ee0-8e89-4da9c60cf488-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.247123] env[62525]: INFO nova.compute.manager [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Terminating instance [ 1585.248870] env[62525]: DEBUG nova.compute.manager [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: 
cafae62e-b001-4ee0-8e89-4da9c60cf488] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1585.249083] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1585.249904] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7308ea-9791-4491-8ee0-9624babd1a7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.257730] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1585.257955] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6cd1424-a952-44d1-a9d4-f5dfd77df112 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.264332] env[62525]: DEBUG oslo_vmware.api [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1585.264332] env[62525]: value = "task-1781551" [ 1585.264332] env[62525]: _type = "Task" [ 1585.264332] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.272202] env[62525]: DEBUG oslo_vmware.api [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781551, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.494402] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52aa37bc-1d2b-935e-43e6-38b47a312b9c, 'name': SearchDatastore_Task, 'duration_secs': 0.012261} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.494630] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.494806] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1585.495048] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.495202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.495381] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1585.495647] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d5f4d4e-c125-48e4-8faa-1bf305f4df48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.498026] env[62525]: DEBUG nova.network.neutron [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1585.506896] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1585.507086] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1585.507784] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-053ce29a-0d42-4608-abf3-d6dd17147c0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.513282] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1585.513282] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b8a190-2621-3f99-eced-ed1a06012a0c" [ 1585.513282] env[62525]: _type = "Task" [ 1585.513282] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.523231] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b8a190-2621-3f99-eced-ed1a06012a0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.628643] env[62525]: DEBUG nova.network.neutron [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updating instance_info_cache with network_info: [{"id": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "address": "fa:16:3e:05:50:cc", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfadf8b76-5c", "ovs_interfaceid": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.680219] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.682913] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.682913] env[62525]: DEBUG nova.objects.instance [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lazy-loading 'resources' on Instance uuid 56cb0d0c-a7dd-4158-8bed-ddff050e0226 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.708530] env[62525]: INFO nova.scheduler.client.report [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted allocations for instance 81fbb354-21f2-43f0-8aa3-e80e10235326 [ 1585.745106] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.745394] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.775841] env[62525]: DEBUG oslo_vmware.api [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781551, 'name': PowerOffVM_Task, 'duration_secs': 0.184512} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.775841] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1585.776777] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1585.776777] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7607478e-914f-4a14-80f1-adb2a842de18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.855997] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1585.856332] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1585.856556] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleting the datastore file [datastore1] cafae62e-b001-4ee0-8e89-4da9c60cf488 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1585.856864] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e82273ba-c59f-4c5f-8839-ae515eb0f34e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.863194] env[62525]: DEBUG oslo_vmware.api [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1585.863194] env[62525]: value = "task-1781553" [ 1585.863194] env[62525]: _type = "Task" [ 1585.863194] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.871295] env[62525]: DEBUG oslo_vmware.api [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781553, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.027150] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b8a190-2621-3f99-eced-ed1a06012a0c, 'name': SearchDatastore_Task, 'duration_secs': 0.009303} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.027915] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35698578-a1f7-4367-aaca-30b81505c624 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.033024] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1586.033024] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521b47cb-a825-2261-e8f3-af944ccd9da6" [ 1586.033024] env[62525]: _type = "Task" [ 1586.033024] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.040909] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521b47cb-a825-2261-e8f3-af944ccd9da6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.131786] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.132157] env[62525]: DEBUG nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Instance network_info: |[{"id": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "address": "fa:16:3e:05:50:cc", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfadf8b76-5c", "ovs_interfaceid": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1586.132747] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:50:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afd3feb3-ffcc-4499-a2c2-eb6a48aefde9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fadf8b76-5c96-4ca1-a32f-c85a68c3fb21', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1586.140095] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Creating folder: Project (a46df22dac6f473b8395f9302c3a4a75). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1586.140393] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d98a7ae1-fcba-4987-afc7-c14bd89f11f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.152322] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Created folder: Project (a46df22dac6f473b8395f9302c3a4a75) in parent group-v369553. [ 1586.152618] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Creating folder: Instances. Parent ref: group-v369760. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1586.152881] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c97c60d8-103f-424b-9dde-2475712c874e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.163458] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Created folder: Instances in parent group-v369760. [ 1586.163757] env[62525]: DEBUG oslo.service.loopingcall [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1586.163981] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1586.164225] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d24176b1-e5f3-4cc5-9704-c7fa8d2da6e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.187858] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1586.187858] env[62525]: value = "task-1781556" [ 1586.187858] env[62525]: _type = "Task" [ 1586.187858] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.196720] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781556, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.215941] env[62525]: DEBUG oslo_concurrency.lockutils [None req-17e636b2-1ce5-4692-8df6-c9bd69655b95 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "81fbb354-21f2-43f0-8aa3-e80e10235326" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.401s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.248908] env[62525]: DEBUG nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1586.353628] env[62525]: DEBUG nova.compute.manager [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Received event network-changed-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1586.353930] env[62525]: DEBUG nova.compute.manager [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Refreshing instance network info cache due to event network-changed-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1586.354251] env[62525]: DEBUG oslo_concurrency.lockutils [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] Acquiring lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.354460] env[62525]: DEBUG oslo_concurrency.lockutils [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] Acquired lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.354708] env[62525]: DEBUG nova.network.neutron [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Refreshing network info cache for port fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1586.374968] env[62525]: DEBUG oslo_vmware.api [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156671} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.377781] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1586.377997] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1586.378247] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1586.378561] env[62525]: INFO nova.compute.manager [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1586.378692] env[62525]: DEBUG oslo.service.loopingcall [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1586.379783] env[62525]: DEBUG nova.compute.manager [-] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1586.379882] env[62525]: DEBUG nova.network.neutron [-] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1586.467011] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cda47af-d437-4ec4-af48-de5ddd3008b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.479069] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d862a2-b232-469b-a677-3f8838899e0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.513415] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e395d2de-2910-42ea-8beb-4f247789a6e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.521894] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76020136-29e1-40ff-bb03-bff5c425ea67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.536934] env[62525]: DEBUG nova.compute.provider_tree [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1586.547751] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521b47cb-a825-2261-e8f3-af944ccd9da6, 'name': SearchDatastore_Task, 'duration_secs': 0.014323} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.548774] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.548957] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1586.549379] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deaa4571-54a6-4be4-9278-cb103cd6b474 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.556944] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1586.556944] env[62525]: value = "task-1781557" [ 1586.556944] env[62525]: _type = "Task" [ 1586.556944] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.567011] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781557, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.702700] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781556, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.772871] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.064739] env[62525]: ERROR nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [req-3bc3e6e7-e050-49eb-a34b-de7438e99b06] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3bc3e6e7-e050-49eb-a34b-de7438e99b06"}]} [ 1587.071912] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781557, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.094158] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1587.112782] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1587.113038] env[62525]: DEBUG nova.compute.provider_tree [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1587.130237] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1587.152233] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1587.203836] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781556, 'name': 
CreateVM_Task, 'duration_secs': 0.611648} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.204041] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1587.204787] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.204959] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.205323] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1587.205595] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f82eccc5-6049-4adb-b940-4041c19dce36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.211221] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1587.211221] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525752c8-e2d6-0167-a309-d17684250494" [ 1587.211221] env[62525]: _type = "Task" [ 1587.211221] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.216946] env[62525]: DEBUG nova.network.neutron [-] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.223654] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]525752c8-e2d6-0167-a309-d17684250494, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.225575] env[62525]: DEBUG nova.network.neutron [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updated VIF entry in instance network info cache for port fadf8b76-5c96-4ca1-a32f-c85a68c3fb21. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1587.225575] env[62525]: DEBUG nova.network.neutron [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updating instance_info_cache with network_info: [{"id": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "address": "fa:16:3e:05:50:cc", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfadf8b76-5c", "ovs_interfaceid": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.431854] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111f9dc0-16db-4222-8db7-9e24ae05f90c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.440370] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfe7b8b-bddb-4e79-8f8c-fcaa664acaee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.472787] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdd819a-fc62-4c92-8986-77483f90247a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.480826] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a7f479-57ca-4962-8da8-0b6ef0fee07f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.494785] env[62525]: DEBUG nova.compute.provider_tree [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1587.567468] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee 
tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781557, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.721067] env[62525]: INFO nova.compute.manager [-] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Took 1.34 seconds to deallocate network for instance. [ 1587.721399] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]525752c8-e2d6-0167-a309-d17684250494, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.731715] env[62525]: DEBUG oslo_concurrency.lockutils [req-e3d6e285-1f96-4188-9aec-29d32adb6b15 req-1cc5215a-eaac-4b69-9014-10d27eaa9739 service nova] Releasing lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.015069] env[62525]: ERROR nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] [req-e4be5381-fea3-42ba-935f-8f26abc4142f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e4be5381-fea3-42ba-935f-8f26abc4142f"}]} [ 1588.031180] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1588.044059] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1588.044303] env[62525]: DEBUG nova.compute.provider_tree [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1588.055537] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1588.067387] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781557, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.070628] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1588.220890] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]525752c8-e2d6-0167-a309-d17684250494, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.228784] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.288407] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a88aef-0e54-4dde-b0e9-d20949dad83f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.295932] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db1c386-d1ad-439b-b198-aaa9d205f7ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.328375] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2535dd-b90a-4ce6-bf2b-3ce7165bcbbd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.336586] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33873b1-78e4-4075-9998-8c80cc9e17f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.350194] env[62525]: DEBUG nova.compute.provider_tree [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1588.378429] env[62525]: DEBUG nova.compute.manager [req-6adc29e7-329e-4cfb-9f63-ae1f615c19fd req-78ec41c3-0292-42ff-897c-e3780d605387 service nova] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Received event 
network-vif-deleted-8fd85844-87de-4df5-a881-9ed796e8af51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1588.447454] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.447689] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.568749] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781557, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.861982} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.568749] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1588.568968] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1588.569782] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-127e97e3-6a46-4eae-ae07-1f942f70aa1d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.575734] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1588.575734] env[62525]: value = "task-1781558" [ 1588.575734] env[62525]: _type = "Task" [ 1588.575734] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.584247] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781558, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.722055] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]525752c8-e2d6-0167-a309-d17684250494, 'name': SearchDatastore_Task, 'duration_secs': 1.19504} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.722366] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.722655] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1588.722889] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.723050] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.723238] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1588.723501] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b99a3b5-c079-436b-a975-05a21dde01cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.732787] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1588.732955] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1588.733670] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03f20135-77df-4477-9ac1-9e0d733b99e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.738633] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1588.738633] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dc1f87-21e6-0f32-4a9a-a744dba17c50" [ 1588.738633] env[62525]: _type = "Task" [ 1588.738633] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.745909] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dc1f87-21e6-0f32-4a9a-a744dba17c50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.882092] env[62525]: DEBUG nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1588.882367] env[62525]: DEBUG nova.compute.provider_tree [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 88 to 89 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1588.882604] env[62525]: DEBUG nova.compute.provider_tree [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1588.949442] env[62525]: DEBUG nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1589.086727] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082177} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.086727] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1589.086727] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84b54fd-eec8-46ad-9e19-df9c005928f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.107610] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.107863] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68749306-d475-4f45-a008-39b09a15644b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.128031] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1589.128031] env[62525]: value = "task-1781559" [ 1589.128031] env[62525]: _type = "Task" [ 1589.128031] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.135760] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781559, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.249680] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dc1f87-21e6-0f32-4a9a-a744dba17c50, 'name': SearchDatastore_Task, 'duration_secs': 0.0101} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.250979] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28843b7a-57e7-4c35-a00e-9d20a893daeb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.256817] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1589.256817] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52996a81-d337-4d80-4cf7-1775d9310740" [ 1589.256817] env[62525]: _type = "Task" [ 1589.256817] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.264947] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52996a81-d337-4d80-4cf7-1775d9310740, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.376636] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.376953] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.387529] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.705s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.389858] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.777s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.391544] env[62525]: INFO nova.compute.claims [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1589.409587] env[62525]: INFO nova.scheduler.client.report [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Deleted allocations for instance 56cb0d0c-a7dd-4158-8bed-ddff050e0226 [ 1589.470515] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 
tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.637411] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781559, 'name': ReconfigVM_Task, 'duration_secs': 0.330397} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.637690] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Reconfigured VM instance instance-00000048 to attach disk [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1589.638331] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-808a8abf-c79b-427c-96fc-a811511ec3fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.643850] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1589.643850] env[62525]: value = "task-1781560" [ 1589.643850] env[62525]: _type = "Task" [ 1589.643850] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.651073] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781560, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.768447] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52996a81-d337-4d80-4cf7-1775d9310740, 'name': SearchDatastore_Task, 'duration_secs': 0.00953} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.768717] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.769065] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 82443424-6071-44b3-bd9a-f92a1a650f27/82443424-6071-44b3-bd9a-f92a1a650f27.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1589.769377] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c74b8a0-2b49-4876-9e8c-507972a737d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.777471] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1589.777471] env[62525]: value = "task-1781561" [ 1589.777471] env[62525]: _type = "Task" [ 1589.777471] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.785352] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.885033] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.885033] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1589.916626] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2c4e7eaa-72d6-456d-b08f-44dc82d18cd1 tempest-ServersAdminTestJSON-245146123 tempest-ServersAdminTestJSON-245146123-project-member] Lock "56cb0d0c-a7dd-4158-8bed-ddff050e0226" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.166s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.154109] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781560, 'name': Rename_Task, 'duration_secs': 0.152395} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.154502] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1590.154649] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b197966-6742-4453-b4d5-4e8e14eac2a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.161286] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1590.161286] env[62525]: value = "task-1781562" [ 1590.161286] env[62525]: _type = "Task" [ 1590.161286] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.169707] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781562, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.288085] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781561, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43903} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.288373] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 82443424-6071-44b3-bd9a-f92a1a650f27/82443424-6071-44b3-bd9a-f92a1a650f27.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1590.288591] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1590.288848] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-559af037-e5e4-4dbb-800f-b7711edc8ce7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.296097] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1590.296097] env[62525]: value = "task-1781563" [ 1590.296097] env[62525]: _type = "Task" [ 1590.296097] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.304072] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781563, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.643028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ad79e6-35bb-435b-9ce1-1d1cee7def39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.650641] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8d28f2-b4af-430c-b039-cb4b7592daa7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.685589] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4186d57-e757-42c4-b674-af0d214c1eaa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.693094] env[62525]: DEBUG oslo_vmware.api [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781562, 'name': PowerOnVM_Task, 'duration_secs': 0.496853} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.695138] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1590.695359] env[62525]: INFO nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Took 9.01 seconds to spawn the instance on the hypervisor. 
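The records above trace the wait_for_task/_poll_task pattern: each vCenter operation (SearchDatastore_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is submitted, then polled until it reports completion, at which point its duration_secs is logged. A minimal sketch of such a poll loop, for orientation only; the get_task_state callable here is a hypothetical stand-in, not the real oslo.vmware API:

import time

def wait_for_task(get_task_state, interval=0.5, timeout=300):
    """Poll a long-running task until it finishes.

    get_task_state is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 42} or {'state': 'success'}; it stands
    in for the property reads the real driver issues against vCenter.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_state()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info.get('error'))
        # Corresponds to the "progress is N%" DEBUG lines emitted while waiting.
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(interval)
    raise TimeoutError('task did not complete within %ss' % timeout)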
[ 1590.695540] env[62525]: DEBUG nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1590.696325] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad94c29-111e-4735-b653-89863911a1e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.699581] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5fe4cc-ac3e-4f04-a483-162b00746fd8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.716705] env[62525]: DEBUG nova.compute.provider_tree [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1590.805338] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781563, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065665} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.805615] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1590.806371] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73b9d06-0a5f-4ba2-aca0-a012c6c5dc53 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.828084] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 82443424-6071-44b3-bd9a-f92a1a650f27/82443424-6071-44b3-bd9a-f92a1a650f27.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1590.828633] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8e573f1-f058-4d78-b82b-2602887862e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.848455] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1590.848455] env[62525]: value = "task-1781564" [ 1590.848455] env[62525]: _type = "Task" [ 1590.848455] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.856869] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781564, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.236752] env[62525]: INFO nova.compute.manager [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Took 21.13 seconds to build instance. 
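The inventory payloads reported to Placement in the records above carry total, reserved and allocation_ratio per resource class, and the scheduler-visible capacity works out to (total - reserved) * allocation_ratio, while max_unit still caps any single allocation (16 VCPU, 65530 MB, 162 GB here). A quick check against the values logged for provider bb89c0ac-8f56-43c6-9f73-fd897be63424; this is a worked example, not Nova code:

# Inventory as logged for provider bb89c0ac-8f56-43c6-9f73-fd897be63424.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# -> VCPU 192, MEMORY_MB 196078, DISK_GB 400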
[ 1591.263773] env[62525]: DEBUG nova.scheduler.client.report [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1591.264065] env[62525]: DEBUG nova.compute.provider_tree [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 89 to 90 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1591.264253] env[62525]: DEBUG nova.compute.provider_tree [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1591.359779] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781564, 'name': ReconfigVM_Task, 'duration_secs': 0.296931} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.360088] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 82443424-6071-44b3-bd9a-f92a1a650f27/82443424-6071-44b3-bd9a-f92a1a650f27.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1591.360751] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93f2c0db-2090-42b0-a8c4-e20ca5e06809 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.367622] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1591.367622] env[62525]: value = "task-1781565" [ 1591.367622] env[62525]: _type = "Task" [ 1591.367622] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.375821] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781565, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.738657] env[62525]: DEBUG oslo_concurrency.lockutils [None req-185ea7d3-c0c8-4aed-aaaa-f3505dc5f7ee tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.642s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.769744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.770367] env[62525]: DEBUG nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1591.773270] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.247s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.773484] env[62525]: DEBUG nova.objects.instance [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lazy-loading 'resources' on Instance uuid 16667060-2172-4c1b-a3c8-340bb38846cf {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1591.878270] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781565, 'name': Rename_Task, 'duration_secs': 0.190418} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.878549] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1591.878809] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7c33878-a1f7-4c87-9ba5-262c3d99f3af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.886411] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1591.886411] env[62525]: value = "task-1781566" [ 1591.886411] env[62525]: _type = "Task" [ 1591.886411] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.895864] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781566, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.074677] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.075130] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.277559] env[62525]: DEBUG nova.compute.utils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1592.279007] env[62525]: DEBUG nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1592.279197] env[62525]: DEBUG nova.network.neutron [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1592.327534] env[62525]: DEBUG nova.policy [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9848bb0f47541f48af2c808646a09b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '209b99adb38b4c8b9e5a277019dbe292', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1592.399786] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781566, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.577483] env[62525]: DEBUG nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1592.584475] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d2f508-07d6-4161-9ee4-c9c3f1dfc635 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.592963] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ec581e-fd87-4c2d-bf05-679f89efaa29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.630445] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4caf3ba4-3c1b-4728-85b1-a80307347de8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.635457] env[62525]: DEBUG nova.network.neutron [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Successfully created port: b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1592.642788] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faf35d7-75fc-4320-a547-7bd377a744d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.661895] env[62525]: DEBUG nova.compute.provider_tree [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1592.784788] env[62525]: DEBUG nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1592.899988] env[62525]: DEBUG oslo_vmware.api [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781566, 'name': PowerOnVM_Task, 'duration_secs': 0.612703} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.900597] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1592.900957] env[62525]: INFO nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Took 8.85 seconds to spawn the instance on the hypervisor. 
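The "Acquiring lock", "acquired by ... :: waited Ns" and ""released" by ... :: held Ns" records above are emitted from oslo.concurrency's lockutils (the inner wrapper and lock context manager whose file paths appear in each record), which times how long a caller waited for and then held a named semaphore such as "compute_resources". A minimal sketch of driving lockutils directly; the function name and instance handling here are illustrative, not Nova's:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs while holding the named lock; the decorator's wrapper is what
    # produces the "acquired ... waited" / "released ... held" DEBUG lines.
    print('claiming for %s' % instance_uuid)

# The context-manager form covers ad-hoc critical sections the same way:
with lockutils.lock('compute_resources'):
    pass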
[ 1592.904322] env[62525]: DEBUG nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1592.905037] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab03a081-9b93-443a-b678-3a12c3af0890 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.108354] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.164339] env[62525]: DEBUG nova.scheduler.client.report [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1593.422039] env[62525]: INFO nova.compute.manager [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Took 21.08 seconds to build instance. 
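Several report-client records above and below note "Inventory has not changed for provider ... based on inventory data": the point of that comparison is to skip a Placement update, and the provider generation bump that goes with one, whenever the freshly computed inventory matches what was last reported. A small sketch of the skip-if-unchanged idea, assuming inventories are plain dicts as logged; this is an illustration, not the nova.scheduler.client.report implementation:

def sync_inventory(provider, new_inventory):
    """Push inventory only when it differs from what is already recorded.

    provider is a simple dict {'uuid': ..., 'generation': int,
    'inventory': dict}; in the real flow the new generation is tracked
    against Placement's response rather than bumped blindly like this.
    """
    if provider['inventory'] == new_inventory:
        return False  # nothing to send, no generation change
    provider['inventory'] = new_inventory
    provider['generation'] += 1  # logged as "generation from N to N+1"
    return True

provider = {'uuid': 'bb89c0ac-8f56-43c6-9f73-fd897be63424',
            'generation': 89,
            'inventory': {'VCPU': {'total': 48, 'reserved': 0}}}
print(sync_inventory(provider, {'VCPU': {'total': 48, 'reserved': 0}}))  # False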
[ 1593.670104] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.896s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.672286] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.900s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.674222] env[62525]: INFO nova.compute.claims [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1593.691790] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5945f502-36fb-4acc-baf5-ed317c2ae27c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-6be49426-ddda-461e-908f-593c0904b129-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.692086] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5945f502-36fb-4acc-baf5-ed317c2ae27c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.692411] env[62525]: DEBUG nova.objects.instance [None req-5945f502-36fb-4acc-baf5-ed317c2ae27c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'flavor' on Instance uuid 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1593.694597] env[62525]: INFO nova.scheduler.client.report [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Deleted allocations for instance 16667060-2172-4c1b-a3c8-340bb38846cf [ 1593.794705] env[62525]: DEBUG nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1593.820845] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1593.821121] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1593.821282] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1593.821476] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1593.821626] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1593.821773] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1593.821975] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1593.822156] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1593.822328] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1593.822512] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1593.822716] env[62525]: DEBUG nova.virt.hardware [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1593.823961] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b52955e-80dd-466b-9a66-8b1886765c33 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.833018] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366d8498-addf-40b4-83f3-c76413d5f352 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.923771] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35d69c92-6c96-49b8-8d61-c8d59d568f6c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.589s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.203029] env[62525]: DEBUG nova.objects.instance [None req-5945f502-36fb-4acc-baf5-ed317c2ae27c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'pci_requests' on Instance uuid 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.205441] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e04e0af2-4632-4a93-87a1-0b168a3da3a3 tempest-ServerMetadataNegativeTestJSON-1820254651 tempest-ServerMetadataNegativeTestJSON-1820254651-project-member] Lock "16667060-2172-4c1b-a3c8-340bb38846cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.382s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.300389] env[62525]: DEBUG nova.compute.manager [req-4b660021-afee-4b5c-9d0f-4b5336a48a5f req-de2d2868-3167-4481-8be7-3a2e684036ca service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-vif-plugged-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1594.300557] env[62525]: DEBUG oslo_concurrency.lockutils [req-4b660021-afee-4b5c-9d0f-4b5336a48a5f req-de2d2868-3167-4481-8be7-3a2e684036ca service nova] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.300734] env[62525]: DEBUG oslo_concurrency.lockutils [req-4b660021-afee-4b5c-9d0f-4b5336a48a5f req-de2d2868-3167-4481-8be7-3a2e684036ca service nova] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.300900] env[62525]: DEBUG oslo_concurrency.lockutils [req-4b660021-afee-4b5c-9d0f-4b5336a48a5f req-de2d2868-3167-4481-8be7-3a2e684036ca service nova] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.301893] env[62525]: DEBUG nova.compute.manager [req-4b660021-afee-4b5c-9d0f-4b5336a48a5f req-de2d2868-3167-4481-8be7-3a2e684036ca service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] No waiting events found dispatching network-vif-plugged-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1594.301893] env[62525]: WARNING nova.compute.manager [req-4b660021-afee-4b5c-9d0f-4b5336a48a5f req-de2d2868-3167-4481-8be7-3a2e684036ca service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received unexpected event network-vif-plugged-b40cb3df-4673-45d7-8b69-c642a8939d96 for instance with vm_state building and task_state spawning. [ 1594.387222] env[62525]: DEBUG nova.network.neutron [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Successfully updated port: b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1594.579011] env[62525]: DEBUG nova.compute.manager [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Received event network-changed-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1594.579292] env[62525]: DEBUG nova.compute.manager [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Refreshing instance network info cache due to event network-changed-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1594.579438] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] Acquiring lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.579609] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] Acquired lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.579790] env[62525]: DEBUG nova.network.neutron [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Refreshing network info cache for port fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1594.706674] env[62525]: DEBUG nova.objects.base [None req-5945f502-36fb-4acc-baf5-ed317c2ae27c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Object Instance<6be49426-ddda-461e-908f-593c0904b129> lazy-loaded attributes: flavor,pci_requests {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1594.706785] env[62525]: DEBUG nova.network.neutron [None req-5945f502-36fb-4acc-baf5-ed317c2ae27c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1594.892730] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.894036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.894036] env[62525]: DEBUG nova.network.neutron [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1594.898371] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5945f502-36fb-4acc-baf5-ed317c2ae27c tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.206s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.973365] env[62525]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770ba5a5-83aa-47f4-b2dd-54f6db4983a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.981952] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e513aee1-6fa1-47ad-98a4-1b197d1dd26b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.015373] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e651797-6dd8-41e3-9e27-15e75de8ebdc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.023487] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c27dbf-2115-4939-8925-9bed689a5906 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.039025] env[62525]: DEBUG nova.compute.provider_tree [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.437170] env[62525]: DEBUG nova.network.neutron [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1595.534877] env[62525]: DEBUG nova.network.neutron [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updated VIF entry in instance network info cache for port fadf8b76-5c96-4ca1-a32f-c85a68c3fb21. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1595.534877] env[62525]: DEBUG nova.network.neutron [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updating instance_info_cache with network_info: [{"id": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "address": "fa:16:3e:05:50:cc", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfadf8b76-5c", "ovs_interfaceid": "fadf8b76-5c96-4ca1-a32f-c85a68c3fb21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.544456] env[62525]: DEBUG nova.scheduler.client.report [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1595.670062] env[62525]: DEBUG nova.network.neutron [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb40cb3df-46", "ovs_interfaceid": "b40cb3df-4673-45d7-8b69-c642a8939d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.040184] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa6abae8-df8c-4590-a735-bfbdb6be611b req-a95263e7-0fe4-43c4-830b-4c7d18165a93 service nova] Releasing lock "refresh_cache-82443424-6071-44b3-bd9a-f92a1a650f27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.050307] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.050852] env[62525]: DEBUG nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1596.056667] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.828s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.056667] env[62525]: DEBUG nova.objects.instance [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lazy-loading 'resources' on Instance uuid cafae62e-b001-4ee0-8e89-4da9c60cf488 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1596.176022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.176022] env[62525]: DEBUG nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Instance network_info: |[{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb40cb3df-46", "ovs_interfaceid": "b40cb3df-4673-45d7-8b69-c642a8939d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1596.176022] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:4a:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b40cb3df-4673-45d7-8b69-c642a8939d96', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1596.182241] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating folder: Project (209b99adb38b4c8b9e5a277019dbe292). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1596.182782] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e33a34bb-1f1f-41d3-b367-59a5d510ab02 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.199123] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created folder: Project (209b99adb38b4c8b9e5a277019dbe292) in parent group-v369553. [ 1596.199123] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating folder: Instances. Parent ref: group-v369763. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1596.199123] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe8053b0-b6df-41ad-9af7-a4b7b8495d84 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.213185] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created folder: Instances in parent group-v369763. [ 1596.213185] env[62525]: DEBUG oslo.service.loopingcall [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1596.213185] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1596.213185] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35b6feca-3cb5-4153-b07d-7943c188f11c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.240954] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1596.240954] env[62525]: value = "task-1781569" [ 1596.240954] env[62525]: _type = "Task" [ 1596.240954] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.249720] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781569, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.336405] env[62525]: DEBUG nova.compute.manager [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1596.336621] env[62525]: DEBUG nova.compute.manager [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing instance network info cache due to event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1596.336866] env[62525]: DEBUG oslo_concurrency.lockutils [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] Acquiring lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.339520] env[62525]: DEBUG oslo_concurrency.lockutils [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] Acquired lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.339520] env[62525]: DEBUG nova.network.neutron [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1596.562410] env[62525]: DEBUG nova.compute.utils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1596.570762] env[62525]: DEBUG nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1596.571058] env[62525]: DEBUG nova.network.neutron [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1596.652833] env[62525]: DEBUG nova.policy [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c0deb1ab43142f29a15397a2e23d048', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '105f108590e14c649fff545b5b96f4fd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1596.746279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-6be49426-ddda-461e-908f-593c0904b129-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.746279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.746934] env[62525]: DEBUG nova.objects.instance [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'flavor' on Instance uuid 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1596.766565] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781569, 'name': CreateVM_Task, 'duration_secs': 0.444501} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.766737] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1596.767482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.767643] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.767979] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1596.768269] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-018e4052-4d3b-47e9-b677-104a5d8f4e36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.773760] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1596.773760] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529cc9b3-556c-ec7b-3234-521c4613d8aa" [ 1596.773760] env[62525]: _type = "Task" [ 1596.773760] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.785218] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529cc9b3-556c-ec7b-3234-521c4613d8aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.908143] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f384a8-0173-47f9-8223-ef8d77c9bb68 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.914805] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee455d5-3f39-4a4c-afa8-a12057845425 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.948875] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec550d8-b8db-446f-b70c-fe763e9ee247 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.957658] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7926cf6b-6fe7-4bb9-9a30-38169b5f6e66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.977474] env[62525]: DEBUG nova.compute.provider_tree [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1596.995397] env[62525]: DEBUG nova.network.neutron [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Successfully created port: eac3eb0e-85c9-4f32-be97-8c93422bdef2 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1597.074813] env[62525]: DEBUG nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1597.208064] env[62525]: DEBUG nova.network.neutron [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updated VIF entry in instance network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1597.212049] env[62525]: DEBUG nova.network.neutron [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb40cb3df-46", "ovs_interfaceid": "b40cb3df-4673-45d7-8b69-c642a8939d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.287911] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529cc9b3-556c-ec7b-3234-521c4613d8aa, 'name': SearchDatastore_Task, 'duration_secs': 0.018202} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.287911] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.288119] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1597.291863] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.291863] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.291863] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1597.291863] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42c778e5-6060-40af-884a-434a4fe1549e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.309465] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1597.309465] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1597.309465] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b65ce0d-ab42-48b6-9c16-5bbd6ee7de8b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.320217] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1597.320217] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dbe792-9648-69d5-9a05-48f894c4a15f" [ 1597.320217] env[62525]: _type = "Task" [ 1597.320217] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.330313] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dbe792-9648-69d5-9a05-48f894c4a15f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.374653] env[62525]: DEBUG nova.objects.instance [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'pci_requests' on Instance uuid 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1597.483260] env[62525]: DEBUG nova.scheduler.client.report [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1597.714600] env[62525]: DEBUG oslo_concurrency.lockutils [req-c2e822cd-fd12-4401-8bf8-5c1fc9d89315 req-6f648d50-c688-409c-a64a-a5b83e2028ec service nova] Releasing lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.832614] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dbe792-9648-69d5-9a05-48f894c4a15f, 'name': SearchDatastore_Task, 'duration_secs': 0.052506} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.833464] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a0e5214-8430-4bc2-a109-105c3fe89e32 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.838733] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1597.838733] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5285c840-0ab5-e820-de40-c38543b96071" [ 1597.838733] env[62525]: _type = "Task" [ 1597.838733] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.846686] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5285c840-0ab5-e820-de40-c38543b96071, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.876782] env[62525]: DEBUG nova.objects.base [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Object Instance<6be49426-ddda-461e-908f-593c0904b129> lazy-loaded attributes: flavor,pci_requests {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1597.877054] env[62525]: DEBUG nova.network.neutron [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1597.927476] env[62525]: DEBUG nova.policy [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1597.986424] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.988804] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: waited 8.518s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.993374] env[62525]: INFO nova.compute.claims [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1598.013528] env[62525]: INFO nova.scheduler.client.report [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted allocations for instance cafae62e-b001-4ee0-8e89-4da9c60cf488 [ 1598.084712] env[62525]: DEBUG nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1598.111943] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1598.112231] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1598.112394] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1598.112573] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1598.112747] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1598.112937] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1598.113230] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1598.113410] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1598.113577] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1598.113748] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1598.113950] env[62525]: DEBUG nova.virt.hardware [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1598.114907] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9baa55-9951-4f6d-8f55-f9dce52ff10a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.123491] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e10f6d-93a1-4c26-ba91-ba6f09e63ba3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.282121] env[62525]: DEBUG nova.network.neutron [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Successfully created port: fb59d389-465d-42dc-ba17-4c75d4f8acbd {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1598.349550] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5285c840-0ab5-e820-de40-c38543b96071, 'name': SearchDatastore_Task, 'duration_secs': 0.01477} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.349938] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.350098] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1598.350370] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b03faeb9-e081-4686-8993-04bce612db09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.358163] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1598.358163] env[62525]: value = "task-1781570" [ 1598.358163] env[62525]: _type = "Task" [ 1598.358163] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.367856] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781570, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.459403] env[62525]: DEBUG nova.compute.manager [req-1edae2d2-5d94-4ff3-b9bd-e0e6ad8118c5 req-1807742d-58f2-494e-9e51-6097c02f4705 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Received event network-vif-plugged-eac3eb0e-85c9-4f32-be97-8c93422bdef2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1598.459618] env[62525]: DEBUG oslo_concurrency.lockutils [req-1edae2d2-5d94-4ff3-b9bd-e0e6ad8118c5 req-1807742d-58f2-494e-9e51-6097c02f4705 service nova] Acquiring lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.459828] env[62525]: DEBUG oslo_concurrency.lockutils [req-1edae2d2-5d94-4ff3-b9bd-e0e6ad8118c5 req-1807742d-58f2-494e-9e51-6097c02f4705 service nova] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.460213] env[62525]: DEBUG oslo_concurrency.lockutils [req-1edae2d2-5d94-4ff3-b9bd-e0e6ad8118c5 req-1807742d-58f2-494e-9e51-6097c02f4705 service nova] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.460461] env[62525]: DEBUG nova.compute.manager [req-1edae2d2-5d94-4ff3-b9bd-e0e6ad8118c5 req-1807742d-58f2-494e-9e51-6097c02f4705 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] No waiting events found dispatching network-vif-plugged-eac3eb0e-85c9-4f32-be97-8c93422bdef2 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1598.460744] env[62525]: WARNING nova.compute.manager [req-1edae2d2-5d94-4ff3-b9bd-e0e6ad8118c5 req-1807742d-58f2-494e-9e51-6097c02f4705 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Received unexpected event network-vif-plugged-eac3eb0e-85c9-4f32-be97-8c93422bdef2 for instance with vm_state building and task_state spawning. [ 1598.527044] env[62525]: DEBUG oslo_concurrency.lockutils [None req-042ef71f-9477-421d-976d-07c5d7c80061 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "cafae62e-b001-4ee0-8e89-4da9c60cf488" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.283s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.546484] env[62525]: DEBUG nova.network.neutron [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Successfully updated port: eac3eb0e-85c9-4f32-be97-8c93422bdef2 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1598.870378] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781570, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.049324] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "refresh_cache-ad6179ad-bafb-42e7-932c-2aa4a5972c44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.049480] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "refresh_cache-ad6179ad-bafb-42e7-932c-2aa4a5972c44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.049632] env[62525]: DEBUG nova.network.neutron [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1599.219662] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eab0f9-0d7a-48b3-a702-2cb5290b420f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.227330] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ca3452-e9a4-4313-9e9e-e2d632ec5847 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.256974] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8119fbcc-e739-4ed7-a9fe-ebbbb84b534c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.264690] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6895645a-45bd-4185-bc81-3cbea2911fc2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.278178] env[62525]: DEBUG nova.compute.provider_tree [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1599.369086] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781570, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57628} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.369357] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1599.369567] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1599.369808] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a06fe35c-1333-46a2-8bd7-54d2d54638f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.375724] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1599.375724] env[62525]: value = "task-1781571" [ 1599.375724] env[62525]: _type = "Task" [ 1599.375724] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.383008] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781571, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.600054] env[62525]: DEBUG nova.network.neutron [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1599.782137] env[62525]: DEBUG nova.scheduler.client.report [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1599.846811] env[62525]: DEBUG nova.compute.manager [req-53de5ac0-2847-4ef0-a545-186bcbe21b2e req-f0ec0b40-fe61-4b4a-ace7-80e52ffd25be service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-vif-plugged-fb59d389-465d-42dc-ba17-4c75d4f8acbd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1599.847038] env[62525]: DEBUG oslo_concurrency.lockutils [req-53de5ac0-2847-4ef0-a545-186bcbe21b2e req-f0ec0b40-fe61-4b4a-ace7-80e52ffd25be service nova] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.847318] env[62525]: DEBUG oslo_concurrency.lockutils [req-53de5ac0-2847-4ef0-a545-186bcbe21b2e req-f0ec0b40-fe61-4b4a-ace7-80e52ffd25be service nova] Lock "6be49426-ddda-461e-908f-593c0904b129-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.847417] env[62525]: DEBUG oslo_concurrency.lockutils [req-53de5ac0-2847-4ef0-a545-186bcbe21b2e req-f0ec0b40-fe61-4b4a-ace7-80e52ffd25be service nova] Lock "6be49426-ddda-461e-908f-593c0904b129-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.847581] env[62525]: DEBUG nova.compute.manager [req-53de5ac0-2847-4ef0-a545-186bcbe21b2e req-f0ec0b40-fe61-4b4a-ace7-80e52ffd25be service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] No waiting events found dispatching network-vif-plugged-fb59d389-465d-42dc-ba17-4c75d4f8acbd {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1599.847741] env[62525]: WARNING nova.compute.manager [req-53de5ac0-2847-4ef0-a545-186bcbe21b2e req-f0ec0b40-fe61-4b4a-ace7-80e52ffd25be service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received unexpected event network-vif-plugged-fb59d389-465d-42dc-ba17-4c75d4f8acbd for instance with vm_state active and task_state None. 
[ 1599.853257] env[62525]: DEBUG nova.network.neutron [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Updating instance_info_cache with network_info: [{"id": "eac3eb0e-85c9-4f32-be97-8c93422bdef2", "address": "fa:16:3e:97:f0:12", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac3eb0e-85", "ovs_interfaceid": "eac3eb0e-85c9-4f32-be97-8c93422bdef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.886132] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781571, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069549} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.886461] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1599.887184] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8d90f2-99bf-4aac-aff4-5e8d0ae533f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.910107] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1599.910757] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27ebc8d4-036b-40e3-9a11-c3cf40e879de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.931125] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1599.931125] env[62525]: value = "task-1781572" [ 1599.931125] env[62525]: _type = "Task" [ 1599.931125] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.940295] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781572, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.983344] env[62525]: DEBUG nova.network.neutron [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Successfully updated port: fb59d389-465d-42dc-ba17-4c75d4f8acbd {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1600.289208] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.290155] env[62525]: DEBUG nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1600.292637] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.185s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.294226] env[62525]: INFO nova.compute.claims [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1600.355587] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "refresh_cache-ad6179ad-bafb-42e7-932c-2aa4a5972c44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.355921] env[62525]: DEBUG nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Instance network_info: |[{"id": "eac3eb0e-85c9-4f32-be97-8c93422bdef2", "address": "fa:16:3e:97:f0:12", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac3eb0e-85", "ovs_interfaceid": "eac3eb0e-85c9-4f32-be97-8c93422bdef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1600.356384] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:f0:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eac3eb0e-85c9-4f32-be97-8c93422bdef2', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1600.364877] env[62525]: DEBUG oslo.service.loopingcall [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting 
for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1600.365131] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1600.365403] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-564b8573-1399-4daa-972f-29c9540d59c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.386409] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1600.386409] env[62525]: value = "task-1781573" [ 1600.386409] env[62525]: _type = "Task" [ 1600.386409] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.394276] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781573, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.441647] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781572, 'name': ReconfigVM_Task, 'duration_secs': 0.444119} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.441967] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfigured VM instance instance-0000004a to attach disk [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1600.442651] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56e5390c-1b1e-4fb3-9811-e516ec6b7b9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.449729] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1600.449729] env[62525]: value = "task-1781574" [ 1600.449729] env[62525]: _type = "Task" [ 1600.449729] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.457932] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781574, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.485451] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.486080] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.486080] env[62525]: DEBUG nova.network.neutron [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1600.491120] env[62525]: DEBUG nova.compute.manager [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Received event network-changed-eac3eb0e-85c9-4f32-be97-8c93422bdef2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1600.491120] env[62525]: DEBUG nova.compute.manager [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Refreshing instance network info cache due to event network-changed-eac3eb0e-85c9-4f32-be97-8c93422bdef2. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1600.491120] env[62525]: DEBUG oslo_concurrency.lockutils [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] Acquiring lock "refresh_cache-ad6179ad-bafb-42e7-932c-2aa4a5972c44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.491120] env[62525]: DEBUG oslo_concurrency.lockutils [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] Acquired lock "refresh_cache-ad6179ad-bafb-42e7-932c-2aa4a5972c44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.491120] env[62525]: DEBUG nova.network.neutron [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Refreshing network info cache for port eac3eb0e-85c9-4f32-be97-8c93422bdef2 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.799270] env[62525]: DEBUG nova.compute.utils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1600.802718] env[62525]: DEBUG nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1600.802916] env[62525]: DEBUG nova.network.neutron [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1600.864726] env[62525]: DEBUG nova.policy [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98769d9ddf744118910ce61bcf47f145', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c87f1997d5c4739850790da5dd969fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1600.897808] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781573, 'name': CreateVM_Task, 'duration_secs': 0.414757} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.897808] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1600.898156] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.898326] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.898637] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1600.898890] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-741be51f-861d-4c8a-aba7-b0868fc680a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.903801] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1600.903801] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520f3031-e7c6-2ab7-4645-9a7a2f721319" [ 1600.903801] env[62525]: _type = "Task" [ 1600.903801] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.912037] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520f3031-e7c6-2ab7-4645-9a7a2f721319, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.960774] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781574, 'name': Rename_Task, 'duration_secs': 0.172463} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.961086] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1600.961372] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb3f34a4-195d-42f8-a7f6-ff525423b543 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.967898] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1600.967898] env[62525]: value = "task-1781575" [ 1600.967898] env[62525]: _type = "Task" [ 1600.967898] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.976409] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1600.976702] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781575, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.976962] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.977162] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.977322] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.977453] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.977594] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.977761] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.977898] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1600.978063] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1601.045849] env[62525]: WARNING nova.network.neutron [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] 58fc2de9-73a3-4f13-914c-ad34af02ccb5 already exists in list: networks containing: ['58fc2de9-73a3-4f13-914c-ad34af02ccb5']. ignoring it [ 1601.120956] env[62525]: DEBUG nova.network.neutron [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Successfully created port: ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1601.303982] env[62525]: DEBUG nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1601.354990] env[62525]: DEBUG nova.network.neutron [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Updated VIF entry in instance network info cache for port eac3eb0e-85c9-4f32-be97-8c93422bdef2. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1601.355408] env[62525]: DEBUG nova.network.neutron [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Updating instance_info_cache with network_info: [{"id": "eac3eb0e-85c9-4f32-be97-8c93422bdef2", "address": "fa:16:3e:97:f0:12", "network": {"id": "1859ce31-1c7e-4823-afd1-1a9356170c68", "bridge": "br-int", "label": "tempest-ImagesTestJSON-415255835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "105f108590e14c649fff545b5b96f4fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac3eb0e-85", "ovs_interfaceid": "eac3eb0e-85c9-4f32-be97-8c93422bdef2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.414548] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520f3031-e7c6-2ab7-4645-9a7a2f721319, 'name': SearchDatastore_Task, 'duration_secs': 0.03285} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.417070] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.417330] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1601.417566] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.417712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.417888] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1601.418470] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2119128b-bdda-43b3-b610-624d0c492505 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.427482] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1601.427666] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1601.428576] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f8486d9-a806-4a08-8748-8f8d64994d2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.438779] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1601.438779] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52243705-b27f-78d1-4b2b-b924c2248581" [ 1601.438779] env[62525]: _type = "Task" [ 1601.438779] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.447403] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52243705-b27f-78d1-4b2b-b924c2248581, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.473885] env[62525]: DEBUG nova.network.neutron [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "address": "fa:16:3e:19:88:ff", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb59d389-46", "ovs_interfaceid": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.482179] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.482445] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781575, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.600096] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ee9172-32dd-4fae-b1af-1b2624042325 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.609309] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765733e9-f253-41b8-813a-f1e8ea00b4ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.644555] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e4c0e5-bb1e-47c0-8a63-1e6623706e67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.653369] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f0eb01-744e-4620-822d-37a8fd30b79b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.667188] env[62525]: DEBUG nova.compute.provider_tree [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1601.864531] env[62525]: DEBUG oslo_concurrency.lockutils [req-99191413-cb7c-439c-9c99-00f14f9034f9 req-a7a40ade-efde-440f-89ed-c13a1da001c7 service nova] Releasing lock "refresh_cache-ad6179ad-bafb-42e7-932c-2aa4a5972c44" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.880099] env[62525]: DEBUG nova.compute.manager [req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-changed-fb59d389-465d-42dc-ba17-4c75d4f8acbd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1601.880441] env[62525]: DEBUG nova.compute.manager 
[req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing instance network info cache due to event network-changed-fb59d389-465d-42dc-ba17-4c75d4f8acbd. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1601.880661] env[62525]: DEBUG oslo_concurrency.lockutils [req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.949973] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52243705-b27f-78d1-4b2b-b924c2248581, 'name': SearchDatastore_Task, 'duration_secs': 0.009655} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.950784] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68374fa0-d7ba-4bfc-9e07-29f479c14efd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.957369] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1601.957369] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523f00ad-046d-e837-cd02-6e0e80606668" [ 1601.957369] env[62525]: _type = "Task" [ 1601.957369] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.965653] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523f00ad-046d-e837-cd02-6e0e80606668, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.977457] env[62525]: DEBUG oslo_vmware.api [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781575, 'name': PowerOnVM_Task, 'duration_secs': 0.788364} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.977763] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1601.978045] env[62525]: INFO nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Took 8.18 seconds to spawn the instance on the hypervisor. 
[ 1601.978297] env[62525]: DEBUG nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1601.978838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.979513] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.979734] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.980481] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4ca27e-7071-4469-924d-c42f0122e9f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.984469] env[62525]: DEBUG oslo_concurrency.lockutils [req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.984469] env[62525]: DEBUG nova.network.neutron [req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing network info cache for port fb59d389-465d-42dc-ba17-4c75d4f8acbd {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1601.985057] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96440dfc-6795-4386-8599-d65423d7489b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.009110] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 1602.009463] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1602.009696] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1602.009938] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1602.010217] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1602.010434] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1602.010695] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1602.010911] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1602.011450] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1602.011699] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1602.011962] env[62525]: DEBUG nova.virt.hardware [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1602.018494] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a 
tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfiguring VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1602.021281] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-add0e63c-35c1-4424-9141-1c95f6d04f0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.044218] env[62525]: DEBUG oslo_vmware.api [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1602.044218] env[62525]: value = "task-1781576" [ 1602.044218] env[62525]: _type = "Task" [ 1602.044218] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.051746] env[62525]: DEBUG oslo_vmware.api [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781576, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.171492] env[62525]: DEBUG nova.scheduler.client.report [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1602.315974] env[62525]: DEBUG nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1602.343335] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1602.343335] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1602.343335] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1602.343515] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1602.343557] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1602.343693] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1602.343900] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1602.344255] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1602.344509] env[62525]: DEBUG nova.virt.hardware [None 
req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1602.344736] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1602.344969] env[62525]: DEBUG nova.virt.hardware [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1602.345862] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e4385b-b127-4b93-ae46-2633745a587d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.353950] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12fcd39-2f78-43f1-8754-e62036e08a9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.468358] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523f00ad-046d-e837-cd02-6e0e80606668, 'name': SearchDatastore_Task, 'duration_secs': 0.039666} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.468696] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.469044] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] ad6179ad-bafb-42e7-932c-2aa4a5972c44/ad6179ad-bafb-42e7-932c-2aa4a5972c44.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1602.469362] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-667877fa-407f-4409-92b9-0686352fb31e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.476199] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1602.476199] env[62525]: value = "task-1781577" [ 1602.476199] env[62525]: _type = "Task" [ 1602.476199] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.484269] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781577, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.562199] env[62525]: INFO nova.compute.manager [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Took 22.96 seconds to build instance. [ 1602.568252] env[62525]: DEBUG oslo_vmware.api [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781576, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.678995] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.679815] env[62525]: DEBUG nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1602.682959] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.201s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.683094] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.683416] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1602.684979] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f5e128-6869-4835-bec3-200a2b56adfe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.694737] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7fca00-f6ab-4cf3-9140-437343f38359 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.712323] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53071a9c-5b6e-42aa-9060-4b5194ea8e52 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.725537] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03bfffd1-3750-4983-ba7b-ad5faa66b493 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.763103] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178944MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1602.763396] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.763693] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.809748] env[62525]: DEBUG nova.network.neutron [req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updated VIF entry in instance network info cache for port 
fb59d389-465d-42dc-ba17-4c75d4f8acbd. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1602.810316] env[62525]: DEBUG nova.network.neutron [req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "address": "fa:16:3e:19:88:ff", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb59d389-46", "ovs_interfaceid": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.986716] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781577, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.055750] env[62525]: DEBUG oslo_vmware.api [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781576, 'name': ReconfigVM_Task, 'duration_secs': 0.766543} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.056456] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.057029] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfigured VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1603.060820] env[62525]: DEBUG oslo_concurrency.lockutils [None req-34e305c7-9c1c-464f-92cb-a13e8f9b6eb8 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.475s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.188197] env[62525]: DEBUG nova.compute.utils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1603.188964] env[62525]: DEBUG nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1603.189374] env[62525]: DEBUG nova.network.neutron [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1603.258170] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Acquiring lock "e8864d73-35e6-490b-a07c-e8cac8baf880" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.258170] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.283346] env[62525]: DEBUG nova.policy [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7f91d593ab442049db42852ea6edd23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af5258cd7a314fc784be2d2e33e6eceb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1603.313210] env[62525]: DEBUG oslo_concurrency.lockutils [req-1b1bf470-bab5-4c91-bb25-799f910e507e req-039b2a1b-811d-4a43-84ae-2c060a5b01fb service nova] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.343399] env[62525]: DEBUG nova.network.neutron [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Successfully updated port: ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1603.487887] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781577, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.562522] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a9220911-1bbe-44d3-96f7-ee178abe4f4a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.816s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.693938] env[62525]: DEBUG nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1603.701067] env[62525]: DEBUG nova.network.neutron [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Successfully created port: cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1603.763273] env[62525]: DEBUG nova.compute.utils [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1603.823400] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.823559] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6e9051e9-aa89-408f-8f62-533085dc1312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.823683] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2f589dc1-9244-475f-86d0-4b69b511508b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.823796] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e8864d73-35e6-490b-a07c-e8cac8baf880 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.823920] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824149] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 7a92bac8-9cee-41ed-81e3-08b48432fe7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824302] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 94560d78-071c-419d-ad10-f42a5b2271a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824419] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0067de08-6708-4c7c-a83a-ed9df193d5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824531] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824642] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824750] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6be49426-ddda-461e-908f-593c0904b129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824859] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2f713b35-9d07-4d25-a333-506fd2469bd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.824963] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e8586018-100e-4729-97fc-98effa87cd9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.825084] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 82443424-6071-44b3-bd9a-f92a1a650f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.829283] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cb043ab8-dff7-48c6-b50b-a4d77a01eb41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.829433] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance ad6179ad-bafb-42e7-932c-2aa4a5972c44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.829516] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.829701] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 80cb1874-2fc8-41ef-b1af-da308f32a2b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.829860] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1603.829988] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3968MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1603.846670] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.846833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.846987] env[62525]: DEBUG nova.network.neutron [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1603.993453] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781577, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.501288} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.993453] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] ad6179ad-bafb-42e7-932c-2aa4a5972c44/ad6179ad-bafb-42e7-932c-2aa4a5972c44.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1603.993453] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1603.993983] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c8f4cc1-20ab-427e-b470-8e4291363f98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.999955] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1603.999955] env[62525]: value = "task-1781578" [ 1603.999955] env[62525]: _type = "Task" [ 1603.999955] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.007020] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781578, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.157307] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2b37ab-9907-4411-8789-c4ef7c85cd02 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.164890] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023dfc47-9c4f-4e1e-a635-24fc2f28c547 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.196792] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3ab777-6dd4-4fb0-9159-0c620f4c5e89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.208800] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0049bf49-6a41-4666-9719-d46d245b0ee4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.224178] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1604.265324] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.379309] env[62525]: DEBUG nova.network.neutron [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1604.510092] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781578, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095783} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.510245] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1604.514022] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b841d8a-3585-4fb0-a502-ded589bf1243 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.538056] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] ad6179ad-bafb-42e7-932c-2aa4a5972c44/ad6179ad-bafb-42e7-932c-2aa4a5972c44.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1604.540805] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdbdd63e-93b8-4093-816e-d5bcf7c54ca4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.568020] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1604.568020] env[62525]: value = "task-1781579" [ 1604.568020] env[62525]: _type = "Task" [ 1604.568020] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.576399] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781579, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.618054] env[62525]: DEBUG nova.network.neutron [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Updating instance_info_cache with network_info: [{"id": "ff78f46c-a55c-4838-abfb-b3411ede9893", "address": "fa:16:3e:a1:fc:d7", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff78f46c-a5", "ovs_interfaceid": "ff78f46c-a55c-4838-abfb-b3411ede9893", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.688489] env[62525]: DEBUG nova.compute.manager [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Received event network-vif-plugged-ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.689406] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Acquiring lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.689647] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.689822] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.690123] env[62525]: DEBUG nova.compute.manager [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] No waiting events found dispatching network-vif-plugged-ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1604.690171] env[62525]: WARNING nova.compute.manager [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Received unexpected event network-vif-plugged-ff78f46c-a55c-4838-abfb-b3411ede9893 for instance with vm_state building and task_state spawning. [ 1604.690336] env[62525]: DEBUG nova.compute.manager [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Received event network-changed-ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.690492] env[62525]: DEBUG nova.compute.manager [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Refreshing instance network info cache due to event network-changed-ff78f46c-a55c-4838-abfb-b3411ede9893. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1604.690660] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Acquiring lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.708791] env[62525]: DEBUG nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1604.728570] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1604.738199] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1604.738457] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1604.738595] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1604.738773] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1604.738936] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1604.739080] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1604.739294] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af 
tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1604.739497] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1604.739808] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1604.739808] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1604.739907] env[62525]: DEBUG nova.virt.hardware [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1604.740773] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3b8ca6-97e9-4550-941c-db921c296e7d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.750017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0c1a87-9b2b-43ea-8e95-bce315d9d7b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.077679] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781579, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.124027] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1605.124027] env[62525]: DEBUG nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Instance network_info: |[{"id": "ff78f46c-a55c-4838-abfb-b3411ede9893", "address": "fa:16:3e:a1:fc:d7", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff78f46c-a5", "ovs_interfaceid": "ff78f46c-a55c-4838-abfb-b3411ede9893", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1605.124027] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Acquired lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1605.124027] env[62525]: DEBUG nova.network.neutron [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Refreshing network info cache for port ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1605.124792] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:fc:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff78f46c-a55c-4838-abfb-b3411ede9893', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1605.139310] env[62525]: DEBUG oslo.service.loopingcall [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 
tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1605.144623] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1605.146161] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cf0ff48-2799-41b6-89bb-bb0c97a5d1d5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.184448] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1605.184448] env[62525]: value = "task-1781580" [ 1605.184448] env[62525]: _type = "Task" [ 1605.184448] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.197342] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781580, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.233241] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1605.233537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.470s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.334531] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Acquiring lock "e8864d73-35e6-490b-a07c-e8cac8baf880" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.334769] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.334998] env[62525]: INFO nova.compute.manager [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Attaching volume 8730a5a8-842b-4e72-b419-46687a36c2bc to /dev/sdb [ 1605.375665] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e1b61d-85a0-4543-a2cc-843f3fe477f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.383421] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-99843b0e-7b1f-44bd-9612-292f6bf0fb02 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.396897] env[62525]: DEBUG nova.virt.block_device [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updating existing volume attachment record: a4725504-627b-4ea1-8a48-efc95c95184f {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1605.565846] env[62525]: DEBUG nova.network.neutron [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Updated VIF entry in instance network info cache for port ff78f46c-a55c-4838-abfb-b3411ede9893. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1605.566438] env[62525]: DEBUG nova.network.neutron [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Updating instance_info_cache with network_info: [{"id": "ff78f46c-a55c-4838-abfb-b3411ede9893", "address": "fa:16:3e:a1:fc:d7", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff78f46c-a5", "ovs_interfaceid": "ff78f46c-a55c-4838-abfb-b3411ede9893", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.576547] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781579, 'name': ReconfigVM_Task, 'duration_secs': 0.792103} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.576823] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Reconfigured VM instance instance-0000004b to attach disk [datastore1] ad6179ad-bafb-42e7-932c-2aa4a5972c44/ad6179ad-bafb-42e7-932c-2aa4a5972c44.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1605.577490] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2f06453-d2f5-4e46-a219-462cec84be78 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.587920] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1605.587920] env[62525]: value = "task-1781583" [ 1605.587920] env[62525]: _type = "Task" [ 1605.587920] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.594671] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781583, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.655129] env[62525]: DEBUG nova.network.neutron [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Successfully updated port: cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1605.695180] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781580, 'name': CreateVM_Task, 'duration_secs': 0.507817} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.695376] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1605.696095] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.696270] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1605.696599] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1605.697997] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41a5bc82-979a-419b-b7c2-e1fe3d0eef0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.702366] env[62525]: DEBUG nova.compute.manager [req-c85eba42-b59f-4be0-a2c1-b8e2b7d4ead1 req-b591db88-8cb1-404e-a32f-e5fd36d4ba0b service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Received event network-vif-plugged-cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1605.702366] env[62525]: DEBUG oslo_concurrency.lockutils [req-c85eba42-b59f-4be0-a2c1-b8e2b7d4ead1 req-b591db88-8cb1-404e-a32f-e5fd36d4ba0b service nova] Acquiring lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.702366] env[62525]: DEBUG oslo_concurrency.lockutils [req-c85eba42-b59f-4be0-a2c1-b8e2b7d4ead1 req-b591db88-8cb1-404e-a32f-e5fd36d4ba0b service nova] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.702366] env[62525]: DEBUG oslo_concurrency.lockutils [req-c85eba42-b59f-4be0-a2c1-b8e2b7d4ead1 req-b591db88-8cb1-404e-a32f-e5fd36d4ba0b service nova] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.702366] env[62525]: DEBUG nova.compute.manager [req-c85eba42-b59f-4be0-a2c1-b8e2b7d4ead1 req-b591db88-8cb1-404e-a32f-e5fd36d4ba0b service nova] [instance: 
80cb1874-2fc8-41ef-b1af-da308f32a2b0] No waiting events found dispatching network-vif-plugged-cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1605.702751] env[62525]: WARNING nova.compute.manager [req-c85eba42-b59f-4be0-a2c1-b8e2b7d4ead1 req-b591db88-8cb1-404e-a32f-e5fd36d4ba0b service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Received unexpected event network-vif-plugged-cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 for instance with vm_state building and task_state spawning. [ 1605.708183] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1605.708183] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52373c3a-6e00-b155-253c-ae1738cf82c7" [ 1605.708183] env[62525]: _type = "Task" [ 1605.708183] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.717178] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52373c3a-6e00-b155-253c-ae1738cf82c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.805045] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-6be49426-ddda-461e-908f-593c0904b129-2304276c-c5b3-41c9-b6c2-def8525f0cbc" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.805336] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-2304276c-c5b3-41c9-b6c2-def8525f0cbc" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.805706] env[62525]: DEBUG nova.objects.instance [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'flavor' on Instance uuid 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1606.072629] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Releasing lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.072910] env[62525]: DEBUG nova.compute.manager [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1606.073121] env[62525]: DEBUG nova.compute.manager [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing instance network info cache due to event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1606.073345] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Acquiring lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.073484] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Acquired lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.073644] env[62525]: DEBUG nova.network.neutron [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1606.099816] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781583, 'name': Rename_Task, 'duration_secs': 0.193889} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.100135] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1606.100617] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c2c0102-2707-4044-a24e-f0041a6f1125 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.108179] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1606.108179] env[62525]: value = "task-1781585" [ 1606.108179] env[62525]: _type = "Task" [ 1606.108179] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.122097] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781585, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.158358] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "refresh_cache-80cb1874-2fc8-41ef-b1af-da308f32a2b0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.158485] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "refresh_cache-80cb1874-2fc8-41ef-b1af-da308f32a2b0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.165060] env[62525]: DEBUG nova.network.neutron [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1606.222707] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52373c3a-6e00-b155-253c-ae1738cf82c7, 'name': SearchDatastore_Task, 'duration_secs': 0.010899} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.222707] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.222707] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1606.222909] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.222909] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.224030] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 
tempest-DeleteServersTestJSON-956550704-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1606.224030] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96773cdd-9e90-47fb-af8c-058ef0ced8d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.249023] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1606.249023] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1606.249618] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4d87c98-26c4-456b-986a-bebfd60a45e6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.258433] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1606.258433] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52418c33-465e-881e-8b43-6ebed183fcbc" [ 1606.258433] env[62525]: _type = "Task" [ 1606.258433] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.269068] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52418c33-465e-881e-8b43-6ebed183fcbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.485978] env[62525]: DEBUG nova.objects.instance [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'pci_requests' on Instance uuid 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1606.619690] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781585, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.689758] env[62525]: DEBUG nova.network.neutron [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1606.768984] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52418c33-465e-881e-8b43-6ebed183fcbc, 'name': SearchDatastore_Task, 'duration_secs': 0.274012} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.769968] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bae32abc-a970-4dd6-9985-2fbb97f5a53b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.779082] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1606.779082] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fe835c-f3ca-f378-fe46-da0109c9bb1e" [ 1606.779082] env[62525]: _type = "Task" [ 1606.779082] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.787803] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fe835c-f3ca-f378-fe46-da0109c9bb1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.849429] env[62525]: DEBUG nova.network.neutron [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Updating instance_info_cache with network_info: [{"id": "cc43ffc1-3745-4fef-81c6-5f9a2c5e7130", "address": "fa:16:3e:18:4d:30", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc43ffc1-37", "ovs_interfaceid": "cc43ffc1-3745-4fef-81c6-5f9a2c5e7130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.926639] env[62525]: DEBUG nova.network.neutron [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: 
cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updated VIF entry in instance network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1606.927107] env[62525]: DEBUG nova.network.neutron [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb40cb3df-46", "ovs_interfaceid": "b40cb3df-4673-45d7-8b69-c642a8939d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.987494] env[62525]: DEBUG nova.objects.base [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Object Instance<6be49426-ddda-461e-908f-593c0904b129> lazy-loaded attributes: flavor,pci_requests {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1606.987756] env[62525]: DEBUG nova.network.neutron [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1607.086051] env[62525]: DEBUG nova.policy [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1607.122191] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781585, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.293500] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fe835c-f3ca-f378-fe46-da0109c9bb1e, 'name': SearchDatastore_Task, 'duration_secs': 0.03887} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.293879] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.294194] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6/1003d1d2-3f2a-4c54-b8de-721a58ef2fd6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1607.294473] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b24510b-3d75-45b7-9db0-acb751730d54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.301112] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1607.301112] env[62525]: value = "task-1781586" [ 1607.301112] env[62525]: _type = "Task" [ 1607.301112] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.311264] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781586, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.353589] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "refresh_cache-80cb1874-2fc8-41ef-b1af-da308f32a2b0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.353589] env[62525]: DEBUG nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Instance network_info: |[{"id": "cc43ffc1-3745-4fef-81c6-5f9a2c5e7130", "address": "fa:16:3e:18:4d:30", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc43ffc1-37", "ovs_interfaceid": "cc43ffc1-3745-4fef-81c6-5f9a2c5e7130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1607.353589] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:4d:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc43ffc1-3745-4fef-81c6-5f9a2c5e7130', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1607.367237] env[62525]: DEBUG oslo.service.loopingcall [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1607.367962] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1607.368413] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-567c6463-dc93-45d8-84d5-885b49b8375a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.397870] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1607.397870] env[62525]: value = "task-1781587" [ 1607.397870] env[62525]: _type = "Task" [ 1607.397870] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.409636] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781587, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.430560] env[62525]: DEBUG oslo_concurrency.lockutils [req-00ece89c-c2a4-4ff8-9d54-b5724166101e req-4dd9e7ff-3229-4061-8f74-2e66df081b42 service nova] Releasing lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.621699] env[62525]: DEBUG oslo_vmware.api [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781585, 'name': PowerOnVM_Task, 'duration_secs': 1.338593} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.622077] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1607.622287] env[62525]: INFO nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Took 9.54 seconds to spawn the instance on the hypervisor. 
[ 1607.622447] env[62525]: DEBUG nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1607.623296] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661c5434-b464-40cf-afd1-3b886380c712 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.731397] env[62525]: DEBUG nova.compute.manager [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Received event network-changed-cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1607.731579] env[62525]: DEBUG nova.compute.manager [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Refreshing instance network info cache due to event network-changed-cc43ffc1-3745-4fef-81c6-5f9a2c5e7130. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1607.731800] env[62525]: DEBUG oslo_concurrency.lockutils [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] Acquiring lock "refresh_cache-80cb1874-2fc8-41ef-b1af-da308f32a2b0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.731946] env[62525]: DEBUG oslo_concurrency.lockutils [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] Acquired lock "refresh_cache-80cb1874-2fc8-41ef-b1af-da308f32a2b0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.732124] env[62525]: DEBUG nova.network.neutron [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Refreshing network info cache for port cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1607.812503] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781586, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.909256] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781587, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.139762] env[62525]: INFO nova.compute.manager [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Took 21.38 seconds to build instance. [ 1608.311555] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781586, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.840207} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.311771] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6/1003d1d2-3f2a-4c54-b8de-721a58ef2fd6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1608.311975] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1608.312237] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-962e8eb7-a760-4fbe-8806-2bbf66a307ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.318487] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1608.318487] env[62525]: value = "task-1781589" [ 1608.318487] env[62525]: _type = "Task" [ 1608.318487] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.325500] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781589, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.408310] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781587, 'name': CreateVM_Task, 'duration_secs': 0.712641} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.408499] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1608.409192] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.409357] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.409676] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1608.409934] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bbe5cdd-f3f3-47d9-8a52-7e6d80db8910 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.414614] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1608.414614] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5280ea33-a9da-2ce6-81a7-39706ecbbaf4" [ 1608.414614] env[62525]: _type = "Task" [ 1608.414614] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.424618] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5280ea33-a9da-2ce6-81a7-39706ecbbaf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.561819] env[62525]: DEBUG nova.network.neutron [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Updated VIF entry in instance network info cache for port cc43ffc1-3745-4fef-81c6-5f9a2c5e7130. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1608.562198] env[62525]: DEBUG nova.network.neutron [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Updating instance_info_cache with network_info: [{"id": "cc43ffc1-3745-4fef-81c6-5f9a2c5e7130", "address": "fa:16:3e:18:4d:30", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc43ffc1-37", "ovs_interfaceid": "cc43ffc1-3745-4fef-81c6-5f9a2c5e7130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.641705] env[62525]: DEBUG oslo_concurrency.lockutils [None req-38dfb670-6d69-4df4-aac3-762b5314d7e3 tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.896s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.829141] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781589, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074735} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.830208] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1608.830208] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f4376b-1689-41e6-8459-ba6a9579dd1c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.855323] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6/1003d1d2-3f2a-4c54-b8de-721a58ef2fd6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1608.859304] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03264726-20c6-4ec5-9577-b1bad0bc07f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.878106] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1608.878106] env[62525]: value = "task-1781590" [ 1608.878106] env[62525]: _type = "Task" [ 1608.878106] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.888489] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781590, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.926266] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5280ea33-a9da-2ce6-81a7-39706ecbbaf4, 'name': SearchDatastore_Task, 'duration_secs': 0.009418} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.926585] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.926816] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1608.927061] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.927308] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.927384] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1608.927630] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-645fbda3-ca85-4cc1-8ba7-58d414026233 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.941053] env[62525]: DEBUG nova.network.neutron [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Successfully updated port: 2304276c-c5b3-41c9-b6c2-def8525f0cbc {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1608.943401] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1608.943484] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1608.944548] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54917b7b-673c-4ec1-929e-da4421687c32 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.950414] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1608.950414] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fd8010-3c29-e123-0387-90a460892609" [ 1608.950414] env[62525]: _type = "Task" [ 1608.950414] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.959051] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd8010-3c29-e123-0387-90a460892609, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.067589] env[62525]: DEBUG oslo_concurrency.lockutils [req-fb640c9b-c162-44ff-9347-5cd41f31a8dd req-dd5d7c0d-77d6-44af-8df0-3e8f88b18116 service nova] Releasing lock "refresh_cache-80cb1874-2fc8-41ef-b1af-da308f32a2b0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.388429] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781590, 'name': ReconfigVM_Task, 'duration_secs': 0.352459} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.388780] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6/1003d1d2-3f2a-4c54-b8de-721a58ef2fd6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1609.389541] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6564c504-8ed9-4736-902c-ada2bdf18e61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.403805] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1609.403805] env[62525]: value = "task-1781591" [ 1609.403805] env[62525]: _type = "Task" [ 1609.403805] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.413882] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781591, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.448279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.452457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.452588] env[62525]: DEBUG nova.network.neutron [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1609.468970] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd8010-3c29-e123-0387-90a460892609, 'name': SearchDatastore_Task, 'duration_secs': 0.011448} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.470541] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f21f20a-b1b5-401e-b417-5ea907ec9401 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.476473] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1609.476473] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d5d8fe-f2b8-2975-2874-d12b7a8ff04e" [ 1609.476473] env[62525]: _type = "Task" [ 1609.476473] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.488438] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d5d8fe-f2b8-2975-2874-d12b7a8ff04e, 'name': SearchDatastore_Task, 'duration_secs': 0.00997} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.488682] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.488932] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 80cb1874-2fc8-41ef-b1af-da308f32a2b0/80cb1874-2fc8-41ef-b1af-da308f32a2b0.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1609.489235] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f33de09-cdf0-4c83-9a0e-19c306a58899 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.497217] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1609.497217] env[62525]: value = "task-1781592" [ 1609.497217] env[62525]: _type = "Task" [ 1609.497217] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.504634] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781592, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.763937] env[62525]: DEBUG nova.compute.manager [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-vif-plugged-2304276c-c5b3-41c9-b6c2-def8525f0cbc {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1609.767552] env[62525]: DEBUG oslo_concurrency.lockutils [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.767552] env[62525]: DEBUG oslo_concurrency.lockutils [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] Lock "6be49426-ddda-461e-908f-593c0904b129-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.767552] env[62525]: DEBUG oslo_concurrency.lockutils [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] Lock "6be49426-ddda-461e-908f-593c0904b129-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.767552] env[62525]: DEBUG nova.compute.manager [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] No waiting events found dispatching network-vif-plugged-2304276c-c5b3-41c9-b6c2-def8525f0cbc {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1609.767552] env[62525]: WARNING nova.compute.manager [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received unexpected event network-vif-plugged-2304276c-c5b3-41c9-b6c2-def8525f0cbc for instance with vm_state active and task_state None. [ 1609.767552] env[62525]: DEBUG nova.compute.manager [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-changed-2304276c-c5b3-41c9-b6c2-def8525f0cbc {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1609.767863] env[62525]: DEBUG nova.compute.manager [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing instance network info cache due to event network-changed-2304276c-c5b3-41c9-b6c2-def8525f0cbc. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1609.767863] env[62525]: DEBUG oslo_concurrency.lockutils [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.917687] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781591, 'name': Rename_Task, 'duration_secs': 0.145605} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.917687] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1609.917905] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ff13031-61a7-4ee0-9a26-ddd5120414b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.925105] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1609.925105] env[62525]: value = "task-1781593" [ 1609.925105] env[62525]: _type = "Task" [ 1609.925105] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.933514] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781593, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.955666] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1609.955666] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369770', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'name': 'volume-8730a5a8-842b-4e72-b419-46687a36c2bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e8864d73-35e6-490b-a07c-e8cac8baf880', 'attached_at': '', 'detached_at': '', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'serial': '8730a5a8-842b-4e72-b419-46687a36c2bc'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1609.956738] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8670f86-3ac0-44a1-8f53-83442241813c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.976703] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8cdcd5-53ca-490f-a37f-0db8a9a499a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.997255] env[62525]: DEBUG nova.compute.manager [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1610.006079] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] volume-8730a5a8-842b-4e72-b419-46687a36c2bc/volume-8730a5a8-842b-4e72-b419-46687a36c2bc.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1610.007083] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850787d4-c8a6-44d7-ac15-c8f7d1bea7f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.009852] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ba930f8-2caa-466d-80d2-e718cbd741da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.047086] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Waiting for the task: (returnval){ [ 1610.047086] env[62525]: value = "task-1781594" [ 1610.047086] env[62525]: _type = "Task" [ 1610.047086] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.047396] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781592, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.048782] env[62525]: WARNING nova.network.neutron [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] 58fc2de9-73a3-4f13-914c-ad34af02ccb5 already exists in list: networks containing: ['58fc2de9-73a3-4f13-914c-ad34af02ccb5']. ignoring it [ 1610.048998] env[62525]: WARNING nova.network.neutron [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] 58fc2de9-73a3-4f13-914c-ad34af02ccb5 already exists in list: networks containing: ['58fc2de9-73a3-4f13-914c-ad34af02ccb5']. ignoring it [ 1610.061292] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781594, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.442049] env[62525]: DEBUG oslo_vmware.api [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781593, 'name': PowerOnVM_Task, 'duration_secs': 0.49989} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.442049] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1610.442049] env[62525]: INFO nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Took 8.13 seconds to spawn the instance on the hypervisor. 
[ 1610.442049] env[62525]: DEBUG nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1610.443196] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76672401-59d2-45c3-b83b-c28e01aab6cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.518072] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637994} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.518331] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 80cb1874-2fc8-41ef-b1af-da308f32a2b0/80cb1874-2fc8-41ef-b1af-da308f32a2b0.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1610.518538] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1610.518781] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8df1a90b-fc6e-4cb1-b834-437eb2deab08 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.529022] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1610.529022] env[62525]: value = "task-1781595" [ 1610.529022] env[62525]: _type = "Task" [ 1610.529022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.534455] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781595, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.549905] env[62525]: INFO nova.compute.manager [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] instance snapshotting [ 1610.559025] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96929505-10d8-4fb7-84b5-a21541db75d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.562728] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781594, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.579714] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214ef2c8-3faf-4c08-ad17-3f73c612f717 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.785918] env[62525]: DEBUG nova.network.neutron [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "address": "fa:16:3e:19:88:ff", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb59d389-46", "ovs_interfaceid": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2304276c-c5b3-41c9-b6c2-def8525f0cbc", "address": "fa:16:3e:2b:00:3a", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2304276c-c5", "ovs_interfaceid": "2304276c-c5b3-41c9-b6c2-def8525f0cbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.971174] env[62525]: INFO nova.compute.manager [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Took 21.52 seconds to build instance. [ 1611.035514] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067527} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.035795] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1611.036667] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba34c180-5029-4899-acce-4946085dbe6b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.061335] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 80cb1874-2fc8-41ef-b1af-da308f32a2b0/80cb1874-2fc8-41ef-b1af-da308f32a2b0.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1611.061335] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bb4a5d3-1ed0-4e9b-a046-d280cc4777f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.086252] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781594, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.087687] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1611.087687] env[62525]: value = "task-1781596" [ 1611.087687] env[62525]: _type = "Task" [ 1611.087687] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.091793] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1611.092142] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8dc6eddf-4c3d-418b-a3ec-5efff55a2792 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.098935] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781596, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.100320] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1611.100320] env[62525]: value = "task-1781597" [ 1611.100320] env[62525]: _type = "Task" [ 1611.100320] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.107819] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781597, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.289551] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.290321] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.290501] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.290860] env[62525]: DEBUG oslo_concurrency.lockutils [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.291060] env[62525]: DEBUG nova.network.neutron [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Refreshing network info cache for port 2304276c-c5b3-41c9-b6c2-def8525f0cbc {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1611.293015] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca12f74-3915-46f5-ba95-01acb391df91 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.315735] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1611.315995] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1611.316205] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1611.316454] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1611.316594] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1611.316757] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1611.316992] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1611.317193] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1611.317416] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1611.317590] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1611.317794] env[62525]: DEBUG nova.virt.hardware [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 
tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1611.325188] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfiguring VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1611.325969] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc30236c-e169-4e82-85a9-8d009423c244 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.345763] env[62525]: DEBUG oslo_vmware.api [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1611.345763] env[62525]: value = "task-1781598" [ 1611.345763] env[62525]: _type = "Task" [ 1611.345763] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.355234] env[62525]: DEBUG oslo_vmware.api [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781598, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.413371] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.413699] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.474616] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d7a1ee3f-aa72-4d6e-8393-fe966a566350 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.027s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.569589] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781594, 'name': ReconfigVM_Task, 'duration_secs': 1.399752} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.569886] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Reconfigured VM instance instance-00000029 to attach disk [datastore1] volume-8730a5a8-842b-4e72-b419-46687a36c2bc/volume-8730a5a8-842b-4e72-b419-46687a36c2bc.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.574825] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f78b6e4e-a3d7-479b-98a4-65c2a9a4f2ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.594253] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Waiting for the task: (returnval){ [ 1611.594253] env[62525]: value = "task-1781599" [ 1611.594253] env[62525]: _type = "Task" [ 1611.594253] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.602231] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781596, 'name': ReconfigVM_Task, 'duration_secs': 0.406067} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.605863] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 80cb1874-2fc8-41ef-b1af-da308f32a2b0/80cb1874-2fc8-41ef-b1af-da308f32a2b0.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.609641] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6345d1e9-9c7d-4fd4-804c-d60cf9c6f303 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.611788] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781599, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.617847] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781597, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.619389] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1611.619389] env[62525]: value = "task-1781600" [ 1611.619389] env[62525]: _type = "Task" [ 1611.619389] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.628374] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781600, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.861612] env[62525]: DEBUG oslo_vmware.api [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781598, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.916633] env[62525]: DEBUG nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1612.047751] env[62525]: DEBUG nova.network.neutron [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updated VIF entry in instance network info cache for port 2304276c-c5b3-41c9-b6c2-def8525f0cbc. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1612.048239] env[62525]: DEBUG nova.network.neutron [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "address": "fa:16:3e:19:88:ff", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb59d389-46", "ovs_interfaceid": "fb59d389-465d-42dc-ba17-4c75d4f8acbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2304276c-c5b3-41c9-b6c2-def8525f0cbc", "address": "fa:16:3e:2b:00:3a", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2304276c-c5", "ovs_interfaceid": "2304276c-c5b3-41c9-b6c2-def8525f0cbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.105426] env[62525]: DEBUG oslo_vmware.api [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781599, 'name': ReconfigVM_Task, 'duration_secs': 0.174295} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.108632] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369770', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'name': 'volume-8730a5a8-842b-4e72-b419-46687a36c2bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e8864d73-35e6-490b-a07c-e8cac8baf880', 'attached_at': '', 'detached_at': '', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'serial': '8730a5a8-842b-4e72-b419-46687a36c2bc'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1612.114984] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781597, 'name': CreateSnapshot_Task, 'duration_secs': 0.667792} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.115234] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1612.116033] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57481a8a-d61c-47c2-9ad3-2934ad950515 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.139418] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781600, 'name': Rename_Task, 'duration_secs': 0.184604} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.140378] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1612.140683] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2a925d6-d66c-45b6-b8b7-9653d2efe92c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.149974] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1612.149974] env[62525]: value = "task-1781601" [ 1612.149974] env[62525]: _type = "Task" [ 1612.149974] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.158604] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781601, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.357376] env[62525]: DEBUG oslo_vmware.api [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781598, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.372017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.372296] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.372479] env[62525]: INFO nova.compute.manager [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Shelving [ 1612.440987] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.441390] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.442940] env[62525]: INFO nova.compute.claims [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1612.551476] env[62525]: DEBUG oslo_concurrency.lockutils [req-0c58ba95-719d-4eed-8d42-1d37778f69df req-23efe684-43ea-4eae-a2a4-6dfca3321b8e service nova] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.647348] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1612.649524] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-05c7f090-5caf-4938-ad0a-ae20b3689ca3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.661674] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 
tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781601, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.663600] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1612.663600] env[62525]: value = "task-1781602" [ 1612.663600] env[62525]: _type = "Task" [ 1612.663600] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.671894] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781602, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.858656] env[62525]: DEBUG oslo_vmware.api [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781598, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.881407] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1612.881737] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea8529c2-e3f1-43ab-a4e8-e59b1966d0e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.888871] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1612.888871] env[62525]: value = "task-1781603" [ 1612.888871] env[62525]: _type = "Task" [ 1612.888871] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.898241] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.154711] env[62525]: DEBUG nova.objects.instance [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lazy-loading 'flavor' on Instance uuid e8864d73-35e6-490b-a07c-e8cac8baf880 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.165418] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781601, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.175640] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781602, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.358931] env[62525]: DEBUG oslo_vmware.api [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781598, 'name': ReconfigVM_Task, 'duration_secs': 1.690094} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.359454] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.359671] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfigured VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1613.398835] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781603, 'name': PowerOffVM_Task, 'duration_secs': 0.180272} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.399100] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1613.399917] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbb2d29-223c-458d-b160-072446e0669d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.419834] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdde89d7-90c4-43d0-8f2a-7295d20e9564 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.665751] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3b63559-b51b-445c-9262-516bd0342e6f tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.331s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.672930] env[62525]: DEBUG oslo_vmware.api [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781601, 'name': PowerOnVM_Task, 'duration_secs': 1.209992} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.676424] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1613.676532] env[62525]: INFO nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Took 8.97 seconds to spawn the instance on the hypervisor. [ 1613.676627] env[62525]: DEBUG nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1613.678914] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bd8463-e012-4130-a5a8-4a5b5774efc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.687657] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781602, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.717502] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa592b41-f149-4bd2-8dc3-8b0e778cc6c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.725281] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18aefd6e-a523-491c-b0db-376021092292 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.757800] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b494417f-b129-47eb-b5dd-f16bbb046482 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.764907] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58451a2c-910e-4b1c-9dcf-a27157b3611d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.778212] env[62525]: DEBUG nova.compute.provider_tree [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1613.864243] env[62525]: DEBUG oslo_concurrency.lockutils [None req-64c3bc6c-76e4-4c11-9aca-82233d51d95a tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-2304276c-c5b3-41c9-b6c2-def8525f0cbc" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.059s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.929860] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1613.930143] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e2fb2ee4-78d6-4f55-9506-d99c89a5041f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.937113] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1613.937113] env[62525]: value = "task-1781604" [ 1613.937113] env[62525]: _type = "Task" [ 1613.937113] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.945905] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781604, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.178343] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781602, 'name': CloneVM_Task, 'duration_secs': 1.232118} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.178629] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Created linked-clone VM from snapshot [ 1614.179408] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67914567-4bd1-4896-b36c-dc4fcb524d69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.187312] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Uploading image 86581e26-7dae-4133-b24d-c5167f07ccfd {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1614.190995] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Acquiring lock "e8864d73-35e6-490b-a07c-e8cac8baf880" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.191310] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.204567] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1614.205364] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-98327e86-ae63-48b5-a640-27be29e3f466 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.209638] env[62525]: INFO nova.compute.manager [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Took 21.12 seconds to build instance. 
[ 1614.215866] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1614.215866] env[62525]: value = "task-1781605" [ 1614.215866] env[62525]: _type = "Task" [ 1614.215866] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.226274] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781605, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.281349] env[62525]: DEBUG nova.scheduler.client.report [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1614.447864] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781604, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.694449] env[62525]: INFO nova.compute.manager [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Detaching volume 8730a5a8-842b-4e72-b419-46687a36c2bc [ 1614.712474] env[62525]: DEBUG oslo_concurrency.lockutils [None req-14c570b5-5abc-494d-a682-fa6a3904b8af tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.637s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.726496] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781605, 'name': Destroy_Task, 'duration_secs': 0.412588} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.727025] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Destroyed the VM [ 1614.727104] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1614.727394] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-60e17c3a-919d-4186-81c4-3e6687962aeb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.730703] env[62525]: INFO nova.virt.block_device [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Attempting to driver detach volume 8730a5a8-842b-4e72-b419-46687a36c2bc from mountpoint /dev/sdb [ 1614.730703] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1614.730827] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369770', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'name': 'volume-8730a5a8-842b-4e72-b419-46687a36c2bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e8864d73-35e6-490b-a07c-e8cac8baf880', 'attached_at': '', 'detached_at': '', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'serial': '8730a5a8-842b-4e72-b419-46687a36c2bc'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1614.731853] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e88d0b-7c1c-48e3-a2ff-3a903bc3b688 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.735840] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1614.735840] env[62525]: value = "task-1781606" [ 1614.735840] env[62525]: _type = "Task" [ 1614.735840] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.757904] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77db4554-8a69-4c07-9271-f49d33a6e336 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.763484] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781606, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.767620] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d429795-9007-482a-9c30-af9b61fc6beb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.787379] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.787859] env[62525]: DEBUG nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1614.790924] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a3f43a-3564-4db9-9eff-708a6fe2275e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.805714] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] The volume has not been displaced from its original location: [datastore1] volume-8730a5a8-842b-4e72-b419-46687a36c2bc/volume-8730a5a8-842b-4e72-b419-46687a36c2bc.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1614.811108] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Reconfiguring VM instance instance-00000029 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1614.811449] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e4d214d-8b65-4130-af12-81bdc2f98cbf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.829646] env[62525]: DEBUG oslo_vmware.api [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Waiting for the task: (returnval){ [ 1614.829646] env[62525]: value = "task-1781607" [ 1614.829646] env[62525]: _type = "Task" [ 1614.829646] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.841853] env[62525]: DEBUG oslo_vmware.api [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781607, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.947995] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781604, 'name': CreateSnapshot_Task, 'duration_secs': 0.623074} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.948325] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1614.949111] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d0bb05-d906-4c33-ac2f-970dff27fc5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.073200] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.073524] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.073735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.073916] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.074097] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.076277] env[62525]: INFO nova.compute.manager [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Terminating instance [ 1615.077973] env[62525]: DEBUG nova.compute.manager [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] 
[instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1615.078190] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1615.078991] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2818e28b-18f4-4d04-89b4-258f60c5f014 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.086747] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1615.086957] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37fc9ec4-fdc1-4978-bbac-cd4516e469a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.093191] env[62525]: DEBUG oslo_vmware.api [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1615.093191] env[62525]: value = "task-1781608" [ 1615.093191] env[62525]: _type = "Task" [ 1615.093191] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.100881] env[62525]: DEBUG oslo_vmware.api [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781608, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.246813] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781606, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.296043] env[62525]: DEBUG nova.compute.utils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1615.297923] env[62525]: DEBUG nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1615.298173] env[62525]: DEBUG nova.network.neutron [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1615.339855] env[62525]: DEBUG oslo_vmware.api [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781607, 'name': ReconfigVM_Task, 'duration_secs': 0.227235} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.340254] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Reconfigured VM instance instance-00000029 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1615.346711] env[62525]: DEBUG nova.policy [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '331219e13abe4d2581652c7e3fd4547e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aee4f0dad260446a8f58605ce463957b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1615.348186] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79bad1e1-7850-4dc4-bcb4-78db22731966 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.364571] env[62525]: DEBUG oslo_vmware.api [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Waiting for the task: (returnval){ [ 1615.364571] env[62525]: value = "task-1781609" [ 1615.364571] env[62525]: _type = "Task" [ 1615.364571] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.372500] env[62525]: DEBUG oslo_vmware.api [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781609, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.467942] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1615.468277] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a85bc298-852f-4850-8bbb-20cddab2656c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.476253] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1615.476253] env[62525]: value = "task-1781610" [ 1615.476253] env[62525]: _type = "Task" [ 1615.476253] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.484489] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781610, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.604835] env[62525]: DEBUG oslo_vmware.api [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781608, 'name': PowerOffVM_Task, 'duration_secs': 0.198901} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.605208] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1615.605480] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1615.605829] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b8eddce-7e7b-4f56-ba49-34491cd0fcf6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.615903] env[62525]: DEBUG nova.network.neutron [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Successfully created port: ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1615.645700] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-6be49426-ddda-461e-908f-593c0904b129-fb59d389-465d-42dc-ba17-4c75d4f8acbd" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.645980] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-fb59d389-465d-42dc-ba17-4c75d4f8acbd" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.678571] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1615.678877] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1615.679240] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleting the datastore file [datastore1] 80cb1874-2fc8-41ef-b1af-da308f32a2b0 {{(pid=62525) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1615.679550] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b23925f-09d0-44be-b418-3ccc5a8e74ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.690134] env[62525]: DEBUG oslo_vmware.api [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1615.690134] env[62525]: value = "task-1781612" [ 1615.690134] env[62525]: _type = "Task" [ 1615.690134] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.702036] env[62525]: DEBUG oslo_vmware.api [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781612, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.755989] env[62525]: DEBUG oslo_vmware.api [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781606, 'name': RemoveSnapshot_Task, 'duration_secs': 0.681724} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.756236] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1615.802579] env[62525]: DEBUG nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1615.874937] env[62525]: DEBUG oslo_vmware.api [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Task: {'id': task-1781609, 'name': ReconfigVM_Task, 'duration_secs': 0.169644} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.875307] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369770', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'name': 'volume-8730a5a8-842b-4e72-b419-46687a36c2bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e8864d73-35e6-490b-a07c-e8cac8baf880', 'attached_at': '', 'detached_at': '', 'volume_id': '8730a5a8-842b-4e72-b419-46687a36c2bc', 'serial': '8730a5a8-842b-4e72-b419-46687a36c2bc'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1615.987443] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781610, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.148602] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.148814] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.149697] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d5e15b-64d0-4602-9944-29bb58b0384e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.168199] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5943fa-88b8-4d9f-8ccc-1b2a6cd5fdf6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.196991] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfiguring VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1616.200079] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8117b7b8-5637-40d1-859c-99b202232aba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.219255] env[62525]: DEBUG oslo_vmware.api [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181768} completed 
successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.220208] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.220208] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1616.220208] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1616.220208] env[62525]: INFO nova.compute.manager [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1616.220208] env[62525]: DEBUG oslo.service.loopingcall [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.220478] env[62525]: DEBUG nova.compute.manager [-] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1616.220478] env[62525]: DEBUG nova.network.neutron [-] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1616.262786] env[62525]: WARNING nova.compute.manager [None req-97d2fde6-b3fc-40c9-9734-51a22e9d1acf tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Image not found during snapshot: nova.exception.ImageNotFound: Image 86581e26-7dae-4133-b24d-c5167f07ccfd could not be found. [ 1616.351117] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1616.351117] env[62525]: value = "task-1781613" [ 1616.351117] env[62525]: _type = "Task" [ 1616.351117] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.430474] env[62525]: DEBUG nova.objects.instance [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lazy-loading 'flavor' on Instance uuid e8864d73-35e6-490b-a07c-e8cac8baf880 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1616.487561] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781610, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.555952] env[62525]: DEBUG nova.compute.manager [req-8bfedf20-7667-47c3-bfa8-bc72e0e559f7 req-8cd8c33b-42ef-454c-9c75-c323a3ab4be9 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Received event network-vif-deleted-cc43ffc1-3745-4fef-81c6-5f9a2c5e7130 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1616.556179] env[62525]: INFO nova.compute.manager [req-8bfedf20-7667-47c3-bfa8-bc72e0e559f7 req-8cd8c33b-42ef-454c-9c75-c323a3ab4be9 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Neutron deleted interface cc43ffc1-3745-4fef-81c6-5f9a2c5e7130; detaching it from the instance and deleting it from the info cache [ 1616.556349] env[62525]: DEBUG nova.network.neutron [req-8bfedf20-7667-47c3-bfa8-bc72e0e559f7 req-8cd8c33b-42ef-454c-9c75-c323a3ab4be9 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.814310] env[62525]: DEBUG nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1616.846622] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1616.846960] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1616.847040] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1616.847203] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1616.847399] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1616.847613] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1616.847836] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1616.848025] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1616.848319] env[62525]: DEBUG nova.virt.hardware [None 
req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1616.848385] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1616.848513] env[62525]: DEBUG nova.virt.hardware [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1616.849690] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6c811a-a3be-442d-9688-1b4953f3cbe7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.865466] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d83ee5-16b2-4de9-9629-726ff60e1ba5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.869642] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.986766] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781610, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.035248] env[62525]: DEBUG nova.network.neutron [-] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.062027] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6eeb1b6-05de-4b0b-bf56-cd5b45b70f72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.069081] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885d73a2-30a6-46f8-a025-59c7eaedc3bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.086658] env[62525]: DEBUG nova.compute.manager [req-3f24c24d-d6a2-4cf4-a3d0-8e8289ac7648 req-c663f7bc-1a5b-47f1-9d79-fa618db1fa2a service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Received event network-vif-plugged-ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1617.086887] env[62525]: DEBUG oslo_concurrency.lockutils [req-3f24c24d-d6a2-4cf4-a3d0-8e8289ac7648 req-c663f7bc-1a5b-47f1-9d79-fa618db1fa2a service nova] Acquiring lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.087107] env[62525]: DEBUG oslo_concurrency.lockutils [req-3f24c24d-d6a2-4cf4-a3d0-8e8289ac7648 req-c663f7bc-1a5b-47f1-9d79-fa618db1fa2a service nova] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.087278] env[62525]: DEBUG oslo_concurrency.lockutils [req-3f24c24d-d6a2-4cf4-a3d0-8e8289ac7648 req-c663f7bc-1a5b-47f1-9d79-fa618db1fa2a service nova] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.087451] env[62525]: DEBUG nova.compute.manager [req-3f24c24d-d6a2-4cf4-a3d0-8e8289ac7648 req-c663f7bc-1a5b-47f1-9d79-fa618db1fa2a service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] No waiting events found dispatching network-vif-plugged-ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1617.087614] env[62525]: WARNING nova.compute.manager [req-3f24c24d-d6a2-4cf4-a3d0-8e8289ac7648 req-c663f7bc-1a5b-47f1-9d79-fa618db1fa2a service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Received unexpected event network-vif-plugged-ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 for instance with vm_state building and task_state spawning. [ 1617.105703] env[62525]: DEBUG nova.compute.manager [req-8bfedf20-7667-47c3-bfa8-bc72e0e559f7 req-8cd8c33b-42ef-454c-9c75-c323a3ab4be9 service nova] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Detach interface failed, port_id=cc43ffc1-3745-4fef-81c6-5f9a2c5e7130, reason: Instance 80cb1874-2fc8-41ef-b1af-da308f32a2b0 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1617.183267] env[62525]: DEBUG nova.network.neutron [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Successfully updated port: ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1617.304800] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.305218] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.305582] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.305880] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.306171] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.308910] env[62525]: INFO nova.compute.manager [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Terminating instance [ 1617.311280] env[62525]: DEBUG nova.compute.manager [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1617.311573] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1617.312943] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcc98d8-94da-4604-a141-20d44567078e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.323485] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1617.323806] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bc2769e-ac72-4e38-a396-7ba178f2cb0f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.331735] env[62525]: DEBUG oslo_vmware.api [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1617.331735] env[62525]: value = "task-1781614" [ 1617.331735] env[62525]: _type = "Task" [ 1617.331735] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.343110] env[62525]: DEBUG oslo_vmware.api [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781614, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.365362] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.441960] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c3dbbf59-420b-4c7b-a455-464a9c6dd44c tempest-VolumesAssistedSnapshotsTest-433543630 tempest-VolumesAssistedSnapshotsTest-433543630-project-admin] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.250s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.492660] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781610, 'name': CloneVM_Task, 'duration_secs': 1.591133} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.492937] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Created linked-clone VM from snapshot [ 1617.493838] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a31026-0269-430c-8825-76d54147ce45 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.501934] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Uploading image e14a9310-cc81-4e41-9ae5-6c102e6e82aa {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1617.524964] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1617.524964] env[62525]: value = "vm-369775" [ 1617.524964] env[62525]: _type = "VirtualMachine" [ 1617.524964] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1617.525637] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d2561884-8bf9-4b56-b64a-962f440d413c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.534182] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lease: (returnval){ [ 1617.534182] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525f5d25-27df-7263-f4f6-2ed6400b4e6c" [ 1617.534182] env[62525]: _type = "HttpNfcLease" [ 1617.534182] env[62525]: } obtained for exporting VM: (result){ [ 1617.534182] env[62525]: value = "vm-369775" [ 1617.534182] env[62525]: _type = "VirtualMachine" [ 1617.534182] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1617.534618] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the lease: (returnval){ [ 1617.534618] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525f5d25-27df-7263-f4f6-2ed6400b4e6c" [ 1617.534618] env[62525]: _type = "HttpNfcLease" [ 1617.534618] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1617.541132] env[62525]: INFO nova.compute.manager [-] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Took 1.32 seconds to deallocate network for instance. [ 1617.541355] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1617.541355] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525f5d25-27df-7263-f4f6-2ed6400b4e6c" [ 1617.541355] env[62525]: _type = "HttpNfcLease" [ 1617.541355] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1617.686338] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "refresh_cache-6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.686500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquired lock "refresh_cache-6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.686657] env[62525]: DEBUG nova.network.neutron [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1617.841352] env[62525]: DEBUG oslo_vmware.api [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781614, 'name': PowerOffVM_Task, 'duration_secs': 0.20127} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.841684] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1617.841720] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1617.841971] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fde6049-3e1a-468d-b61c-de9175fd964a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.863431] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.012831] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1618.012831] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1618.012831] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleting the datastore file [datastore1] ad6179ad-bafb-42e7-932c-2aa4a5972c44 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1618.013118] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-952955d0-b89f-4716-898a-969c50363d80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.020140] env[62525]: DEBUG oslo_vmware.api [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for the task: (returnval){ [ 1618.020140] env[62525]: value = "task-1781618" [ 1618.020140] env[62525]: _type = "Task" [ 1618.020140] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.030286] env[62525]: DEBUG oslo_vmware.api [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781618, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.042295] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1618.042295] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525f5d25-27df-7263-f4f6-2ed6400b4e6c" [ 1618.042295] env[62525]: _type = "HttpNfcLease" [ 1618.042295] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1618.042596] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1618.042596] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525f5d25-27df-7263-f4f6-2ed6400b4e6c" [ 1618.042596] env[62525]: _type = "HttpNfcLease" [ 1618.042596] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1618.043430] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87de05da-1b75-443e-9150-4c35de9d86cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.046709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.046946] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.047175] env[62525]: DEBUG nova.objects.instance [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lazy-loading 'resources' on Instance uuid 80cb1874-2fc8-41ef-b1af-da308f32a2b0 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1618.051228] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e2f7b-a991-b359-ebd1-b893cccaece0/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1618.051396] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e2f7b-a991-b359-ebd1-b893cccaece0/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1618.144924] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-81071865-ea0e-4996-ae85-9fab32bded9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.219194] env[62525]: DEBUG nova.network.neutron [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1618.365840] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.367447] env[62525]: DEBUG nova.network.neutron [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Updating instance_info_cache with network_info: [{"id": "ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0", "address": "fa:16:3e:9d:6b:03", "network": {"id": "7296fb5e-c4f3-41d6-94dd-60809dd47954", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1399363247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aee4f0dad260446a8f58605ce463957b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef6cfcc4-87", "ovs_interfaceid": "ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.531216] env[62525]: DEBUG oslo_vmware.api [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Task: {'id': task-1781618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155574} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.531614] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1618.531874] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1618.532142] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1618.532394] env[62525]: INFO nova.compute.manager [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Took 1.22 seconds to destroy the instance on the hypervisor. 
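The entries above trace the VMware destroy path for instance ad6179ad-bafb-42e7-932c-2aa4a5972c44: power off (PowerOffVM_Task), unregister, then deletion of the instance's datastore directory (DeleteDatastoreFile_Task), each step surfacing as a vCenter task that the driver polls until it reports success and a duration_secs. A minimal sketch of that poll-until-done loop, assuming a hypothetical poll() callable in place of the real oslo.vmware session API:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(poll, interval=0.5):
        # poll() is a hypothetical callable returning the task's current state,
        # e.g. {'state': 'running', 'progress': 14} or {'state': 'success'};
        # the real driver reads this from vCenter's TaskInfo instead.
        start = time.monotonic()
        while True:
            info = poll()
            if info['state'] == 'success':
                return time.monotonic() - start        # the duration_secs seen in the log
            if info['state'] == 'error':
                raise TaskFailed(info.get('message', 'task failed'))
            time.sleep(interval)                       # "progress is N%" lines come from this loop

    # Toy usage: a fake poller that succeeds on its third call.
    _calls = {'n': 0}
    def fake_poll():
        _calls['n'] += 1
        if _calls['n'] >= 3:
            return {'state': 'success'}
        return {'state': 'running', 'progress': 14}

    print('completed in %.3fs' % wait_for_task(fake_poll, interval=0.01))

The same polling pattern accounts for the ReconfigVM_Task, CopyVirtualDisk_Task and SearchDatastore_Task progress lines interleaved throughout this stretch of the log.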
[ 1618.532693] env[62525]: DEBUG oslo.service.loopingcall [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1618.532946] env[62525]: DEBUG nova.compute.manager [-] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1618.533094] env[62525]: DEBUG nova.network.neutron [-] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1618.823328] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07541ef5-1072-4bd0-b91c-f91ba76f838a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.831441] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b3e0ae-6c89-4499-9c91-de7647c46c50 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.865920] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569b5341-ae62-4fe8-9851-1bd38baa68ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.869613] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Releasing lock "refresh_cache-6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.869982] env[62525]: DEBUG nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Instance network_info: |[{"id": "ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0", "address": "fa:16:3e:9d:6b:03", "network": {"id": "7296fb5e-c4f3-41d6-94dd-60809dd47954", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1399363247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aee4f0dad260446a8f58605ce463957b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef6cfcc4-87", "ovs_interfaceid": "ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1618.870818] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:6b:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1618.880287] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Creating folder: Project (aee4f0dad260446a8f58605ce463957b). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1618.885637] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24adc9ae-ed1e-4b86-8255-1aa6c41d0b84 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.887513] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.888837] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed140fd-2f41-4872-94d8-9f5bb5313bbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.904358] env[62525]: DEBUG nova.compute.provider_tree [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1618.906905] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Created folder: Project (aee4f0dad260446a8f58605ce463957b) in parent group-v369553. [ 1618.907101] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Creating folder: Instances. Parent ref: group-v369776. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1618.907540] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-342f559e-8542-498a-9d5f-4468845f90df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.917978] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Created folder: Instances in parent group-v369776. 
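Just before the CreateVM_Task below, the driver makes sure the target folder hierarchy exists: a per-tenant "Project (<tenant_id>)" folder under the OpenStack root folder, with an "Instances" folder inside it, and only then creates the VM in that folder. A toy create-or-reuse sketch of that layout; the in-memory inventory and helper names here are illustrative stand-ins, not the real nova.virt.vmwareapi.vm_util.create_folder API:

    # Toy in-memory inventory: (parent ref, folder name) -> child folder ref.
    _inventory = {}

    def ensure_folder(parent_ref, name):
        """Create the child folder if it is missing, otherwise reuse it.
        The generated refs only imitate the group-v... managed object IDs above."""
        key = (parent_ref, name)
        if key not in _inventory:
            # stand-in for the Folder.CreateFolder call seen in the log
            _inventory[key] = 'group-v%d' % (369776 + len(_inventory))
        return _inventory[key]

    def instances_folder(root_ref, tenant_id):
        project = ensure_folder(root_ref, 'Project (%s)' % tenant_id)
        return ensure_folder(project, 'Instances')

    # Mirrors the log: project folder under group-v369553, then Instances under it.
    vm_folder = instances_folder('group-v369553', 'aee4f0dad260446a8f58605ce463957b')
    print(vm_folder)   # the folder that the subsequent CreateVM_Task targets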
[ 1618.918223] env[62525]: DEBUG oslo.service.loopingcall [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1618.918510] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1618.919025] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4ec020f-c0a0-471c-8d90-dc855df1491d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.938924] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1618.938924] env[62525]: value = "task-1781621" [ 1618.938924] env[62525]: _type = "Task" [ 1618.938924] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.946968] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781621, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.115951] env[62525]: DEBUG nova.compute.manager [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Received event network-changed-ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1619.116170] env[62525]: DEBUG nova.compute.manager [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Refreshing instance network info cache due to event network-changed-ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1619.116390] env[62525]: DEBUG oslo_concurrency.lockutils [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] Acquiring lock "refresh_cache-6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.116546] env[62525]: DEBUG oslo_concurrency.lockutils [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] Acquired lock "refresh_cache-6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.116724] env[62525]: DEBUG nova.network.neutron [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Refreshing network info cache for port ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1619.290638] env[62525]: DEBUG nova.network.neutron [-] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.373449] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.409643] env[62525]: DEBUG nova.scheduler.client.report [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1619.449112] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781621, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.793625] env[62525]: INFO nova.compute.manager [-] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Took 1.26 seconds to deallocate network for instance. [ 1619.834691] env[62525]: DEBUG nova.network.neutron [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Updated VIF entry in instance network info cache for port ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1619.835058] env[62525]: DEBUG nova.network.neutron [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Updating instance_info_cache with network_info: [{"id": "ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0", "address": "fa:16:3e:9d:6b:03", "network": {"id": "7296fb5e-c4f3-41d6-94dd-60809dd47954", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1399363247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aee4f0dad260446a8f58605ce463957b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef6cfcc4-87", "ovs_interfaceid": "ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.875239] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.916504] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.939799] env[62525]: INFO nova.scheduler.client.report [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleted allocations for instance 80cb1874-2fc8-41ef-b1af-da308f32a2b0 [ 1619.952429] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781621, 'name': CreateVM_Task, 'duration_secs': 0.527641} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.952608] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1619.953331] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.953550] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.953880] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1619.954166] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-224d90f3-8971-465d-8f38-959c7571ea8a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.959156] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1619.959156] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529cccf0-78b0-51e3-e90f-72349424ef63" [ 1619.959156] env[62525]: _type = "Task" [ 1619.959156] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.969630] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529cccf0-78b0-51e3-e90f-72349424ef63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.303142] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.303451] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.303645] env[62525]: DEBUG nova.objects.instance [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lazy-loading 'resources' on Instance uuid ad6179ad-bafb-42e7-932c-2aa4a5972c44 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1620.337912] env[62525]: DEBUG oslo_concurrency.lockutils [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] Releasing lock "refresh_cache-6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.338207] env[62525]: DEBUG nova.compute.manager [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Received event network-vif-deleted-eac3eb0e-85c9-4f32-be97-8c93422bdef2 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1620.338382] env[62525]: INFO nova.compute.manager [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Neutron deleted interface eac3eb0e-85c9-4f32-be97-8c93422bdef2; detaching it from the instance and deleting it from the info cache [ 1620.338544] env[62525]: DEBUG nova.network.neutron [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.373134] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.451544] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9adf662b-0c12-422d-86ae-9cfd2adb461e tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "80cb1874-2fc8-41ef-b1af-da308f32a2b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.378s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.470636] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529cccf0-78b0-51e3-e90f-72349424ef63, 'name': SearchDatastore_Task, 'duration_secs': 0.011326} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.471010] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.471280] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1620.471542] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.471692] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.471870] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1620.472138] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52fffb22-385f-42e2-a666-21c2605d6fbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.481148] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1620.481346] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1620.482117] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0e66713-b7a8-4328-8ad3-786ac9968153 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.487743] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1620.487743] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e2385c-70ac-a717-0b79-38ae0abced80" [ 1620.487743] env[62525]: _type = "Task" [ 1620.487743] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.496322] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e2385c-70ac-a717-0b79-38ae0abced80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.841876] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b460fd41-6888-42ac-af06-1d2b7aca091a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.855360] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c34460-0ad7-4020-a42f-2fea8eb9e00c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.878378] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.894926] env[62525]: DEBUG nova.compute.manager [req-55b1ccda-dea8-4d4b-8c95-e7ef1b4548b2 req-f1946860-fd47-485a-89a1-10e0c9370d71 service nova] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Detach interface failed, port_id=eac3eb0e-85c9-4f32-be97-8c93422bdef2, reason: Instance ad6179ad-bafb-42e7-932c-2aa4a5972c44 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1621.003194] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e2385c-70ac-a717-0b79-38ae0abced80, 'name': SearchDatastore_Task, 'duration_secs': 0.010142} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.007043] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa5d980b-def4-47e5-95ca-5d2f51d3222b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.012836] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1621.012836] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b18c23-3560-7505-ff77-d87d2f854a17" [ 1621.012836] env[62525]: _type = "Task" [ 1621.012836] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.023706] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b18c23-3560-7505-ff77-d87d2f854a17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.061038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d74927-5150-4321-b7b9-ca858ed935b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.068289] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6e3347-7af4-4c99-84e9-c09e01335097 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.098316] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5fe0f7-8df0-4fad-9437-d7182d018f79 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.106214] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cc2ebe-a0c8-4326-bfc5-bf4f6ad75eb8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.124362] env[62525]: DEBUG nova.compute.provider_tree [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.382534] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.525591] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b18c23-3560-7505-ff77-d87d2f854a17, 'name': SearchDatastore_Task, 'duration_secs': 0.010581} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.525837] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.526157] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7/6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1621.526455] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f92e8ef3-c9b0-4412-975c-c9821c3df811 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.533259] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1621.533259] env[62525]: value = "task-1781622" [ 1621.533259] env[62525]: _type = "Task" [ 1621.533259] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.541706] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781622, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.626563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "e8864d73-35e6-490b-a07c-e8cac8baf880" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.626869] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.627133] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "e8864d73-35e6-490b-a07c-e8cac8baf880-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.627326] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.627499] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.629819] env[62525]: DEBUG nova.scheduler.client.report [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1621.633973] env[62525]: INFO nova.compute.manager [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Terminating instance [ 1621.634927] env[62525]: DEBUG nova.compute.manager [None 
req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1621.635152] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1621.636065] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a32c7f-d570-409b-a9dd-e350e321b1b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.644273] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1621.644463] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed313210-2ee4-4eff-9dce-c357cf2c3e9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.651793] env[62525]: DEBUG oslo_vmware.api [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1621.651793] env[62525]: value = "task-1781623" [ 1621.651793] env[62525]: _type = "Task" [ 1621.651793] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.660246] env[62525]: DEBUG oslo_vmware.api [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.880297] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.043725] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781622, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.136335] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.163086] env[62525]: DEBUG oslo_vmware.api [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781623, 'name': PowerOffVM_Task, 'duration_secs': 0.227621} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.164283] env[62525]: INFO nova.scheduler.client.report [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Deleted allocations for instance ad6179ad-bafb-42e7-932c-2aa4a5972c44 [ 1622.165568] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1622.165743] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1622.168511] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-045e53bf-2716-4233-a90b-2904caedc5fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.287836] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1622.288224] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1622.288547] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Deleting the datastore file [datastore1] e8864d73-35e6-490b-a07c-e8cac8baf880 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1622.288855] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-665e0afc-0068-4129-8341-bc9a4d363666 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.295835] env[62525]: DEBUG oslo_vmware.api [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for the task: (returnval){ [ 1622.295835] env[62525]: value = "task-1781625" [ 1622.295835] env[62525]: _type = "Task" [ 1622.295835] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.305067] env[62525]: DEBUG oslo_vmware.api [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.380285] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.544333] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766675} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.544618] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7/6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1622.544827] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1622.545090] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a89fec0c-6e16-413f-82c9-ef7b5f89f829 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.552169] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1622.552169] env[62525]: value = "task-1781626" [ 1622.552169] env[62525]: _type = "Task" [ 1622.552169] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.560800] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.674068] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e44eb450-7d40-4e6b-8cea-5736fb3a3dca tempest-ImagesTestJSON-1792439270 tempest-ImagesTestJSON-1792439270-project-member] Lock "ad6179ad-bafb-42e7-932c-2aa4a5972c44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.369s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.807884] env[62525]: DEBUG oslo_vmware.api [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Task: {'id': task-1781625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276427} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.808174] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1622.808427] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1622.808659] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1622.808849] env[62525]: INFO nova.compute.manager [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1622.809161] env[62525]: DEBUG oslo.service.loopingcall [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1622.809425] env[62525]: DEBUG nova.compute.manager [-] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1622.809542] env[62525]: DEBUG nova.network.neutron [-] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1622.879878] env[62525]: DEBUG oslo_vmware.api [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781613, 'name': ReconfigVM_Task, 'duration_secs': 6.331833} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.880155] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.880372] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Reconfigured VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1623.068088] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068751} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.068533] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1623.069639] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9a0057-ec55-463a-a239-75488303aafc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.094335] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7/6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1623.096548] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e9ffcb7-32f1-4ce6-9ebb-0e524a711d9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.115808] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1623.115808] env[62525]: value = "task-1781627" [ 1623.115808] env[62525]: _type = "Task" [ 1623.115808] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.125053] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781627, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.149827] env[62525]: DEBUG nova.compute.manager [req-3375416d-de6c-4f48-afbc-8875f496c530 req-f1029c86-daa0-4639-ba27-7e45f04ff08a service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-vif-deleted-fb59d389-465d-42dc-ba17-4c75d4f8acbd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1623.150018] env[62525]: INFO nova.compute.manager [req-3375416d-de6c-4f48-afbc-8875f496c530 req-f1029c86-daa0-4639-ba27-7e45f04ff08a service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Neutron deleted interface fb59d389-465d-42dc-ba17-4c75d4f8acbd; detaching it from the instance and deleting it from the info cache [ 1623.150312] env[62525]: DEBUG nova.network.neutron [req-3375416d-de6c-4f48-afbc-8875f496c530 req-f1029c86-daa0-4639-ba27-7e45f04ff08a service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2304276c-c5b3-41c9-b6c2-def8525f0cbc", "address": "fa:16:3e:2b:00:3a", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2304276c-c5", "ovs_interfaceid": "2304276c-c5b3-41c9-b6c2-def8525f0cbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1623.628875] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781627, 'name': ReconfigVM_Task, 'duration_secs': 0.448995} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.628875] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7/6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1623.629381] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7dcc10e7-6e7e-4e74-876d-f316f6f3f853 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.637443] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1623.637443] env[62525]: value = "task-1781628" [ 1623.637443] env[62525]: _type = "Task" [ 1623.637443] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.649046] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781628, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.652661] env[62525]: DEBUG oslo_concurrency.lockutils [req-3375416d-de6c-4f48-afbc-8875f496c530 req-f1029c86-daa0-4639-ba27-7e45f04ff08a service nova] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.652844] env[62525]: DEBUG oslo_concurrency.lockutils [req-3375416d-de6c-4f48-afbc-8875f496c530 req-f1029c86-daa0-4639-ba27-7e45f04ff08a service nova] Acquired lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.653706] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df638121-3fb9-4aef-b614-4f6ebe4581ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.674861] env[62525]: DEBUG oslo_concurrency.lockutils [req-3375416d-de6c-4f48-afbc-8875f496c530 req-f1029c86-daa0-4639-ba27-7e45f04ff08a service nova] Releasing lock "6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.675133] env[62525]: WARNING nova.compute.manager [req-3375416d-de6c-4f48-afbc-8875f496c530 req-f1029c86-daa0-4639-ba27-7e45f04ff08a service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Detach interface failed, port_id=fb59d389-465d-42dc-ba17-4c75d4f8acbd, reason: No device with interface-id fb59d389-465d-42dc-ba17-4c75d4f8acbd exists on VM: nova.exception.NotFound: No device with interface-id fb59d389-465d-42dc-ba17-4c75d4f8acbd exists on VM [ 1623.677717] env[62525]: DEBUG nova.compute.manager [req-472eb2ad-da94-4f50-adba-1aa9dc5a15c9 req-9e65af24-09a6-4f81-bc1a-697ed179dd47 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Received event network-vif-deleted-c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1623.677824] env[62525]: INFO nova.compute.manager [req-472eb2ad-da94-4f50-adba-1aa9dc5a15c9 req-9e65af24-09a6-4f81-bc1a-697ed179dd47 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Neutron deleted interface c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca; detaching it from the instance and deleting it from the info cache [ 1623.677978] env[62525]: DEBUG nova.network.neutron [req-472eb2ad-da94-4f50-adba-1aa9dc5a15c9 req-9e65af24-09a6-4f81-bc1a-697ed179dd47 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.082307] env[62525]: DEBUG nova.network.neutron [-] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.119881] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.122458] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.122458] env[62525]: DEBUG nova.network.neutron [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1624.152018] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781628, 'name': Rename_Task, 'duration_secs': 0.194146} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.152338] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1624.152592] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd8b202e-6ea9-4352-b720-850a7ea0905b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.159580] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1624.159580] env[62525]: value = "task-1781629" [ 1624.159580] env[62525]: _type = "Task" [ 1624.159580] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.171179] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781629, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.183355] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ccede15d-432b-4888-b0b8-9bd8cb908819 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.193188] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4c03a0-5fb9-48ec-afa3-e427c2a3720d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.239054] env[62525]: DEBUG nova.compute.manager [req-472eb2ad-da94-4f50-adba-1aa9dc5a15c9 req-9e65af24-09a6-4f81-bc1a-697ed179dd47 service nova] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Detach interface failed, port_id=c9bb49cb-09e3-4fa1-a31f-f6099ce6f1ca, reason: Instance e8864d73-35e6-490b-a07c-e8cac8baf880 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1624.586491] env[62525]: INFO nova.compute.manager [-] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Took 1.77 seconds to deallocate network for instance. [ 1624.670405] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781629, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.711973] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.711973] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "6be49426-ddda-461e-908f-593c0904b129" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.712163] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "6be49426-ddda-461e-908f-593c0904b129-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.712318] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "6be49426-ddda-461e-908f-593c0904b129-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.713200] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "6be49426-ddda-461e-908f-593c0904b129-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.715029] env[62525]: INFO nova.compute.manager [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Terminating instance [ 1624.716468] env[62525]: DEBUG nova.compute.manager [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1624.716672] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.717514] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4a70bb-171e-4b48-9a10-7448f99aaa63 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.725366] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.725614] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-619aa572-51c3-4c71-b7f7-999452b65ceb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.733215] env[62525]: DEBUG oslo_vmware.api [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1624.733215] env[62525]: value = "task-1781630" [ 1624.733215] env[62525]: _type = "Task" [ 1624.733215] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.744128] env[62525]: DEBUG oslo_vmware.api [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781630, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.905122] env[62525]: INFO nova.network.neutron [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Port 2304276c-c5b3-41c9-b6c2-def8525f0cbc from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1624.906705] env[62525]: DEBUG nova.network.neutron [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [{"id": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "address": "fa:16:3e:e7:03:d5", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7fc668-05", "ovs_interfaceid": "7a7fc668-0509-45b5-954b-ce58cc91d1e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.092412] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.092706] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.093639] env[62525]: DEBUG nova.objects.instance [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lazy-loading 'resources' on Instance uuid e8864d73-35e6-490b-a07c-e8cac8baf880 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1625.170494] env[62525]: DEBUG oslo_vmware.api [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781629, 'name': PowerOnVM_Task, 'duration_secs': 0.6308} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.170783] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1625.170985] env[62525]: INFO nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Took 8.36 seconds to spawn the instance on the hypervisor. [ 1625.171179] env[62525]: DEBUG nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1625.171952] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15549432-2339-46fe-b876-516329440668 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.188367] env[62525]: DEBUG nova.compute.manager [req-120c4740-4b2d-46ed-8490-f45a766e85a7 req-9473accc-b4af-4c84-b2b9-fc18e6ff89bc service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-vif-deleted-2304276c-c5b3-41c9-b6c2-def8525f0cbc {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1625.244280] env[62525]: DEBUG oslo_vmware.api [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781630, 'name': PowerOffVM_Task, 'duration_secs': 0.250264} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.244528] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1625.244780] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1625.245071] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1c22575-04c4-4d5a-9588-da7155b4a4dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.367117] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1625.367385] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1625.367711] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleting the datastore file [datastore1] 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1625.368106] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bce084b-378b-4ad4-8ba0-99bb99fe1440 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.377976] env[62525]: DEBUG oslo_vmware.api [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1625.377976] env[62525]: value = "task-1781632" [ 1625.377976] env[62525]: _type = "Task" [ 1625.377976] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.386863] env[62525]: DEBUG oslo_vmware.api [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781632, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.408675] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-6be49426-ddda-461e-908f-593c0904b129" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.567062] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e2f7b-a991-b359-ebd1-b893cccaece0/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1625.567939] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabd1831-e8f3-4349-911c-45bc3ccfdc85 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.573985] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e2f7b-a991-b359-ebd1-b893cccaece0/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1625.574204] env[62525]: ERROR oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e2f7b-a991-b359-ebd1-b893cccaece0/disk-0.vmdk due to incomplete transfer. [ 1625.574411] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-71e15d59-deb9-459b-8747-865b6fdb6382 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.581275] env[62525]: DEBUG oslo_vmware.rw_handles [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e2f7b-a991-b359-ebd1-b893cccaece0/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1625.581543] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Uploaded image e14a9310-cc81-4e41-9ae5-6c102e6e82aa to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1625.584026] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1625.584026] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d67be978-5ec9-44e4-90c8-3d7cd9027b4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.589786] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1625.589786] env[62525]: value = "task-1781633" [ 1625.589786] env[62525]: _type = "Task" [ 1625.589786] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.600285] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781633, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.693086] env[62525]: INFO nova.compute.manager [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Took 13.27 seconds to build instance. 
[ 1625.836561] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2ca459-5766-41fc-8be9-4229964ddc69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.846318] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa247cc-9afc-4758-88ed-65bb462f7cac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.882728] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155c6ade-319a-4bab-98bc-d8d64bacb04a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.896647] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cfa760-99f5-4702-b295-5a969754b50b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.900943] env[62525]: DEBUG oslo_vmware.api [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180324} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.901229] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.901520] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.901664] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.902094] env[62525]: INFO nova.compute.manager [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 6be49426-ddda-461e-908f-593c0904b129] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1625.902094] env[62525]: DEBUG oslo.service.loopingcall [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.902732] env[62525]: DEBUG nova.compute.manager [-] [instance: 6be49426-ddda-461e-908f-593c0904b129] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1625.902865] env[62525]: DEBUG nova.network.neutron [-] [instance: 6be49426-ddda-461e-908f-593c0904b129] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1625.914416] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ba9f9fb-81dc-48c9-b3a5-d4e9d1a0f731 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-6be49426-ddda-461e-908f-593c0904b129-fb59d389-465d-42dc-ba17-4c75d4f8acbd" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.268s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.915171] env[62525]: DEBUG nova.compute.provider_tree [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1626.022317] env[62525]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 2304276c-c5b3-41c9-b6c2-def8525f0cbc could not be found.", "detail": ""}} {{(pid=62525) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1626.022445] env[62525]: DEBUG nova.network.neutron [-] Unable to show port 2304276c-c5b3-41c9-b6c2-def8525f0cbc as it no longer exists. {{(pid=62525) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1626.099643] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781633, 'name': Destroy_Task} progress is 33%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.196268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-584b6f2e-a0b9-4555-88be-1af7a661def3 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.781s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.422072] env[62525]: DEBUG nova.scheduler.client.report [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1626.536262] env[62525]: DEBUG nova.compute.manager [req-d3b2522a-5fa7-462b-9415-b7610b5dfadf req-c0faa63b-ab45-4fd8-985a-35c9d8ba88a5 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Received event network-vif-deleted-7a7fc668-0509-45b5-954b-ce58cc91d1e3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1626.536262] env[62525]: INFO nova.compute.manager [req-d3b2522a-5fa7-462b-9415-b7610b5dfadf req-c0faa63b-ab45-4fd8-985a-35c9d8ba88a5 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Neutron deleted interface 7a7fc668-0509-45b5-954b-ce58cc91d1e3; detaching it from the instance and deleting it from the info cache [ 1626.536262] env[62525]: DEBUG nova.network.neutron [req-d3b2522a-5fa7-462b-9415-b7610b5dfadf req-c0faa63b-ab45-4fd8-985a-35c9d8ba88a5 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.604840] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781633, 'name': Destroy_Task} progress is 33%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.933610] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.979952] env[62525]: INFO nova.scheduler.client.report [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Deleted allocations for instance e8864d73-35e6-490b-a07c-e8cac8baf880 [ 1626.998106] env[62525]: DEBUG nova.network.neutron [-] [instance: 6be49426-ddda-461e-908f-593c0904b129] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.039887] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-257d92c5-5112-4faf-b5f8-d70c57bce4ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.049792] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5126581a-a3a0-40b5-a550-01ad7f49acc9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.087506] env[62525]: DEBUG nova.compute.manager [req-d3b2522a-5fa7-462b-9415-b7610b5dfadf req-c0faa63b-ab45-4fd8-985a-35c9d8ba88a5 service nova] [instance: 6be49426-ddda-461e-908f-593c0904b129] Detach interface failed, port_id=7a7fc668-0509-45b5-954b-ce58cc91d1e3, reason: Instance 6be49426-ddda-461e-908f-593c0904b129 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1627.101769] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781633, 'name': Destroy_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.490300] env[62525]: DEBUG oslo_concurrency.lockutils [None req-215abf63-4073-48db-a97f-06330b10a642 tempest-VolumesAssistedSnapshotsTest-423173448 tempest-VolumesAssistedSnapshotsTest-423173448-project-member] Lock "e8864d73-35e6-490b-a07c-e8cac8baf880" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.863s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.503196] env[62525]: INFO nova.compute.manager [-] [instance: 6be49426-ddda-461e-908f-593c0904b129] Took 1.60 seconds to deallocate network for instance. [ 1627.607932] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781633, 'name': Destroy_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.667592] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.667916] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.668182] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.668381] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.668560] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.670866] env[62525]: INFO nova.compute.manager [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Terminating instance [ 1627.675041] env[62525]: DEBUG nova.compute.manager [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1627.675414] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1627.676086] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07fc33c-79c3-43c7-8487-bd6d9bb7b042 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.684089] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1627.684650] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e326ebc6-68a6-4850-b09a-fd3b88f792b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.693022] env[62525]: DEBUG oslo_vmware.api [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1627.693022] env[62525]: value = "task-1781634" [ 1627.693022] env[62525]: _type = "Task" [ 1627.693022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.698277] env[62525]: DEBUG oslo_vmware.api [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781634, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.013229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.013973] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.013973] env[62525]: DEBUG nova.objects.instance [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'resources' on Instance uuid 6be49426-ddda-461e-908f-593c0904b129 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1628.108262] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781633, 'name': Destroy_Task, 'duration_secs': 2.182994} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.109191] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Destroyed the VM [ 1628.111879] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1628.111879] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-10b50583-86aa-4e1e-9d6c-8fa95044e7b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.118034] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1628.118034] env[62525]: value = "task-1781635" [ 1628.118034] env[62525]: _type = "Task" [ 1628.118034] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.132597] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781635, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.201278] env[62525]: DEBUG oslo_vmware.api [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781634, 'name': PowerOffVM_Task, 'duration_secs': 0.307841} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.203527] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1628.203768] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1628.204094] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b20b9252-3589-43e0-9428-0bd3b1be5c9a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.300297] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1628.300652] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1628.300948] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Deleting the datastore file [datastore1] 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.301401] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70e12ee1-7bbe-4170-9e82-8597bbba6d8c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.307770] env[62525]: DEBUG oslo_vmware.api [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for the task: (returnval){ [ 1628.307770] env[62525]: value = "task-1781637" [ 1628.307770] env[62525]: _type = "Task" [ 1628.307770] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.315572] env[62525]: DEBUG oslo_vmware.api [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.629320] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781635, 'name': RemoveSnapshot_Task, 'duration_secs': 0.407117} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.631608] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1628.631968] env[62525]: DEBUG nova.compute.manager [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1628.634870] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238d40fb-2c48-41f1-b22b-1efddd783f4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.825965] env[62525]: DEBUG oslo_vmware.api [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Task: {'id': task-1781637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160282} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.826304] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1628.826493] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1628.827209] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1628.827209] env[62525]: INFO nova.compute.manager [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1628.827209] env[62525]: DEBUG oslo.service.loopingcall [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.828029] env[62525]: DEBUG nova.compute.manager [-] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1628.828029] env[62525]: DEBUG nova.network.neutron [-] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1628.835541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a2c141-7d70-4de4-92c4-90a21d35f96d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.846193] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a280436d-f7a0-437e-8b90-d43f5d07465d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.881294] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1aa4a5-447f-4f66-9719-2c8c1ea93f5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.890127] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9a491d-ecf2-4a77-b444-42ad69c8de7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.904081] env[62525]: DEBUG nova.compute.provider_tree [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.151486] env[62525]: INFO nova.compute.manager [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Shelve offloading [ 1629.154326] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1629.154326] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b964185-0db9-4c8e-9fdb-6caa57e78827 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.164582] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1629.164582] env[62525]: value = "task-1781638" [ 1629.164582] env[62525]: _type = "Task" [ 1629.164582] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.178094] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1629.180018] env[62525]: DEBUG nova.compute.manager [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1629.180018] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd31ce40-ffb0-4d23-8885-6ad347f6b613 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.186099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.186276] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.186431] env[62525]: DEBUG nova.network.neutron [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1629.407750] env[62525]: DEBUG nova.scheduler.client.report [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1629.597308] env[62525]: DEBUG nova.compute.manager [req-3fe3694d-ffa0-411b-bd2e-cf165fd158e2 req-e7891825-f85e-4cba-a13c-2b1dfd1a7f5e service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Received event network-vif-deleted-ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1629.597308] env[62525]: INFO nova.compute.manager [req-3fe3694d-ffa0-411b-bd2e-cf165fd158e2 req-e7891825-f85e-4cba-a13c-2b1dfd1a7f5e service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Neutron deleted interface 
ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0; detaching it from the instance and deleting it from the info cache [ 1629.597308] env[62525]: DEBUG nova.network.neutron [req-3fe3694d-ffa0-411b-bd2e-cf165fd158e2 req-e7891825-f85e-4cba-a13c-2b1dfd1a7f5e service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.841441] env[62525]: DEBUG nova.network.neutron [-] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.912654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.899s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.968579] env[62525]: INFO nova.scheduler.client.report [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted allocations for instance 6be49426-ddda-461e-908f-593c0904b129 [ 1629.998410] env[62525]: DEBUG nova.network.neutron [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Updating instance_info_cache with network_info: [{"id": "ff78f46c-a55c-4838-abfb-b3411ede9893", "address": "fa:16:3e:a1:fc:d7", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff78f46c-a5", "ovs_interfaceid": "ff78f46c-a55c-4838-abfb-b3411ede9893", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.100990] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a113b942-3928-456d-86fd-33564c5177c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.113375] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcdce732-7a94-401b-b3ee-86a27fd80690 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.153839] env[62525]: DEBUG 
nova.compute.manager [req-3fe3694d-ffa0-411b-bd2e-cf165fd158e2 req-e7891825-f85e-4cba-a13c-2b1dfd1a7f5e service nova] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Detach interface failed, port_id=ef6cfcc4-8716-40ff-9dd5-0f38ae41c0a0, reason: Instance 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1630.344296] env[62525]: INFO nova.compute.manager [-] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Took 1.52 seconds to deallocate network for instance. [ 1630.480203] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a2649c84-c62b-4d4a-8efa-00ad1eff8392 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "6be49426-ddda-461e-908f-593c0904b129" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.768s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.500377] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.855330] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.855752] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.856124] env[62525]: DEBUG nova.objects.instance [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lazy-loading 'resources' on Instance uuid 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1630.985104] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1630.986368] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c38c6e3-65e8-4e78-a794-c3705217f8ad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.994626] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Unregistering the VM {{(pid=62525) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1630.994933] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d22dd58e-b956-41a8-bcc0-b7383458dca1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.063192] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1631.063405] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1631.063599] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleting the datastore file [datastore1] 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1631.064062] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43a1c854-3081-4ae2-9da7-41a9095a8267 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.074123] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.074489] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.078366] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1631.078366] env[62525]: value = "task-1781640" [ 1631.078366] env[62525]: _type = "Task" [ 1631.078366] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.091921] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781640, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.579290] env[62525]: DEBUG nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1631.598324] env[62525]: DEBUG oslo_vmware.api [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155742} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.598583] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1631.598765] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1631.599632] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1631.623975] env[62525]: DEBUG nova.compute.manager [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Received event network-vif-unplugged-ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.624251] env[62525]: DEBUG oslo_concurrency.lockutils [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] Acquiring lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.624499] env[62525]: DEBUG oslo_concurrency.lockutils [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.624674] env[62525]: DEBUG oslo_concurrency.lockutils [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.624854] env[62525]: DEBUG nova.compute.manager 
[req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] No waiting events found dispatching network-vif-unplugged-ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1631.625864] env[62525]: WARNING nova.compute.manager [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Received unexpected event network-vif-unplugged-ff78f46c-a55c-4838-abfb-b3411ede9893 for instance with vm_state shelved and task_state shelving_offloading. [ 1631.626178] env[62525]: DEBUG nova.compute.manager [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Received event network-changed-ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.626374] env[62525]: DEBUG nova.compute.manager [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Refreshing instance network info cache due to event network-changed-ff78f46c-a55c-4838-abfb-b3411ede9893. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1631.626566] env[62525]: DEBUG oslo_concurrency.lockutils [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] Acquiring lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.626705] env[62525]: DEBUG oslo_concurrency.lockutils [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] Acquired lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.626867] env[62525]: DEBUG nova.network.neutron [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Refreshing network info cache for port ff78f46c-a55c-4838-abfb-b3411ede9893 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1631.632541] env[62525]: INFO nova.scheduler.client.report [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted allocations for instance 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6 [ 1631.656383] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e3aa4b-1641-48f9-8ba8-a7f9c10f717c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.667562] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c1d724-c1e2-4d38-91f5-a790663224b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.700246] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e6284e-5430-40e6-95ae-bca01521393c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.708231] 
env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d6a1f1-1e52-493b-8369-9643c069f5c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.723124] env[62525]: DEBUG nova.compute.provider_tree [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.116743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.138118] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.226770] env[62525]: DEBUG nova.scheduler.client.report [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1632.585863] env[62525]: DEBUG nova.network.neutron [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Updated VIF entry in instance network info cache for port ff78f46c-a55c-4838-abfb-b3411ede9893. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1632.586377] env[62525]: DEBUG nova.network.neutron [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Updating instance_info_cache with network_info: [{"id": "ff78f46c-a55c-4838-abfb-b3411ede9893", "address": "fa:16:3e:a1:fc:d7", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": null, "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapff78f46c-a5", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.635627] env[62525]: DEBUG oslo_concurrency.lockutils [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.732919] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.877s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.735968] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.620s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.737513] env[62525]: INFO nova.compute.claims [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1632.760280] env[62525]: INFO nova.scheduler.client.report [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Deleted allocations for instance 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7 [ 1633.094678] env[62525]: DEBUG oslo_concurrency.lockutils [req-4c7b7ff1-5249-44d6-aaef-725fdad3c1d7 req-fe88be37-d494-4328-bfa6-3271daed20d7 service nova] Releasing lock "refresh_cache-1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.269309] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5c08038f-2851-41b6-b614-71fc90b90a48 tempest-ServerMetadataTestJSON-112859155 tempest-ServerMetadataTestJSON-112859155-project-member] Lock "6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.600s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.785682] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.785986] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.789996] env[62525]: INFO nova.compute.manager [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Shelving [ 1633.864831] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.865114] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.976274] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3ff124-2f52-467a-9826-cbbd7ccc718c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.983978] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd46f01-7428-4789-aef5-b3ff7ee4dead {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.014570] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb17998d-b0b8-4f8e-b645-e31dfeb69304 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.023244] env[62525]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330a979d-35ab-41b7-b1c5-a5788bd75830 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.036101] env[62525]: DEBUG nova.compute.provider_tree [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.304120] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1634.304120] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96ec04bc-d840-452e-abc2-72f1f3ef9d17 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.310299] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1634.310299] env[62525]: value = "task-1781641" [ 1634.310299] env[62525]: _type = "Task" [ 1634.310299] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.317914] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.370531] env[62525]: DEBUG nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1634.541371] env[62525]: DEBUG nova.scheduler.client.report [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1634.819870] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781641, 'name': PowerOffVM_Task, 'duration_secs': 0.205552} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.823097] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1634.823097] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d022aa-e807-44a6-8cc2-e0401b7b1910 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.846544] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d143eaf-a646-4f9a-a6a7-00c46c86937c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.893750] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.049878] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.314s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.050578] env[62525]: DEBUG nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1635.056295] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.915s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.056295] env[62525]: DEBUG nova.objects.instance [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'resources' on Instance uuid 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1635.365317] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1635.365317] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-34131682-cb5c-4c14-9e95-7a582b0f68d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.375093] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1635.375093] env[62525]: value = "task-1781642" [ 1635.375093] env[62525]: _type = "Task" [ 1635.375093] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.385891] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781642, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.556256] env[62525]: DEBUG nova.compute.utils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1635.557815] env[62525]: DEBUG nova.objects.instance [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'numa_topology' on Instance uuid 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1635.558937] env[62525]: DEBUG nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1635.559181] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1635.617506] env[62525]: DEBUG nova.policy [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18bd37c3a74a4873a12092f31ccb07f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeeaeb287b194ebfb0c57e33ef138187', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1635.883116] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781642, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.890554] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Successfully created port: befd9821-2180-4d16-b3eb-fb5cd2996595 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1636.061568] env[62525]: DEBUG nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1636.063040] env[62525]: DEBUG nova.objects.base [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Object Instance<1003d1d2-3f2a-4c54-b8de-721a58ef2fd6> lazy-loaded attributes: resources,numa_topology {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1636.160331] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Successfully created port: 43bbe938-60f4-4e29-9eb3-2eeecc0f7dba {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1636.278747] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d7bb69-9798-46f1-9ed5-e13b59a6d975 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.286552] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e19e2c8-22b7-4556-8e35-177e0ac0c7d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.318190] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73a7138-a29e-462f-9018-63012744f869 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.326186] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb7ad86-f3a6-40aa-94fe-a321a9156246 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.340903] env[62525]: DEBUG nova.compute.provider_tree [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.383521] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781642, 'name': CreateSnapshot_Task, 'duration_secs': 0.947231} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.383703] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1636.384509] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdaf18f-0a90-4189-a6f2-db497b15448e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.430263] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Successfully created port: aa34451f-78cd-485b-9a6a-4ff6e664707f {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1636.815426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.815631] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.844775] env[62525]: DEBUG nova.scheduler.client.report [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1636.902586] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1636.903369] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7680b6f8-6f74-44ee-9de2-9638abb074d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.912356] env[62525]: DEBUG 
oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1636.912356] env[62525]: value = "task-1781643" [ 1636.912356] env[62525]: _type = "Task" [ 1636.912356] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.920859] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781643, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.072645] env[62525]: DEBUG nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1637.102321] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1637.102595] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1637.102855] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1637.102971] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1637.103149] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1637.103306] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 
tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1637.103515] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1637.103690] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1637.103855] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1637.104067] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1637.104266] env[62525]: DEBUG nova.virt.hardware [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1637.105214] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9b1e3a-d1f3-4487-b600-784926f5f3ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.113414] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467bf9dd-e237-40e1-bbd5-bfc438a7c47e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.318157] env[62525]: DEBUG nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1637.350617] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.353770] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.460s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.355253] env[62525]: INFO nova.compute.claims [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1637.423181] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781643, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.840503] env[62525]: DEBUG nova.compute.manager [req-65471e3d-1435-42ba-bf09-15cf7c35ea36 req-e17fa34a-af9d-4ca0-be67-0268cabcabec service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-vif-plugged-befd9821-2180-4d16-b3eb-fb5cd2996595 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1637.840503] env[62525]: DEBUG oslo_concurrency.lockutils [req-65471e3d-1435-42ba-bf09-15cf7c35ea36 req-e17fa34a-af9d-4ca0-be67-0268cabcabec service nova] Acquiring lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.840503] env[62525]: DEBUG oslo_concurrency.lockutils [req-65471e3d-1435-42ba-bf09-15cf7c35ea36 req-e17fa34a-af9d-4ca0-be67-0268cabcabec service nova] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.840503] env[62525]: DEBUG oslo_concurrency.lockutils [req-65471e3d-1435-42ba-bf09-15cf7c35ea36 req-e17fa34a-af9d-4ca0-be67-0268cabcabec service nova] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.840503] env[62525]: DEBUG nova.compute.manager [req-65471e3d-1435-42ba-bf09-15cf7c35ea36 req-e17fa34a-af9d-4ca0-be67-0268cabcabec service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] No waiting events found dispatching network-vif-plugged-befd9821-2180-4d16-b3eb-fb5cd2996595 {{(pid=62525) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1637.840961] env[62525]: WARNING nova.compute.manager [req-65471e3d-1435-42ba-bf09-15cf7c35ea36 req-e17fa34a-af9d-4ca0-be67-0268cabcabec service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received unexpected event network-vif-plugged-befd9821-2180-4d16-b3eb-fb5cd2996595 for instance with vm_state building and task_state spawning. [ 1637.844013] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.866675] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0b032b82-9efe-4824-a338-14af2f115d6b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.494s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.868500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.232s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.868500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.868500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.868500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.870280] env[62525]: INFO nova.compute.manager [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Terminating instance [ 1637.872372] env[62525]: DEBUG nova.compute.manager [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 
tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1637.872623] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1637.872892] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8833ac58-eaca-46be-aedf-ef4eb1a95314 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.882921] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218a5893-059e-4586-8531-e72af1f8195e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.921471] env[62525]: WARNING nova.virt.vmwareapi.vmops [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6 could not be found. [ 1637.921745] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1637.921864] env[62525]: INFO nova.compute.manager [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1637.922117] env[62525]: DEBUG oslo.service.loopingcall [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1637.925361] env[62525]: DEBUG nova.compute.manager [-] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1637.925460] env[62525]: DEBUG nova.network.neutron [-] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1637.934011] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781643, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.953173] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Successfully updated port: befd9821-2180-4d16-b3eb-fb5cd2996595 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.430566] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781643, 'name': CloneVM_Task, 'duration_secs': 1.13709} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.430722] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Created linked-clone VM from snapshot [ 1638.431456] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ff9926-21bd-4adf-9df9-d65c2f10b152 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.438361] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Uploading image 02017241-feb5-466c-b885-b516496197f8 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1638.463164] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1638.463164] env[62525]: value = "vm-369780" [ 1638.463164] env[62525]: _type = "VirtualMachine" [ 1638.463164] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1638.463420] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e45186f3-8ae7-4c96-bddf-ea0f10f8feb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.470325] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lease: (returnval){ [ 1638.470325] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52525143-6ba2-dd02-08da-f9aecd25c97f" [ 1638.470325] env[62525]: _type = "HttpNfcLease" [ 1638.470325] env[62525]: } obtained for exporting VM: (result){ [ 1638.470325] env[62525]: value = "vm-369780" [ 1638.470325] env[62525]: _type = "VirtualMachine" [ 1638.470325] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1638.470562] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the lease: (returnval){ [ 1638.470562] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52525143-6ba2-dd02-08da-f9aecd25c97f" [ 1638.470562] env[62525]: _type = "HttpNfcLease" [ 1638.470562] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1638.480735] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1638.480735] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52525143-6ba2-dd02-08da-f9aecd25c97f" [ 1638.480735] env[62525]: _type = "HttpNfcLease" [ 1638.480735] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1638.561914] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79f5dc2-8864-4ed3-908d-e9587ea8aafd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.569414] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2cb378f-0b0f-4ab8-a2a8-a0e5aa061fbe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.601044] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19ffe33-85bc-4560-9b72-7ad81639d0af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.608479] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc5dfe0-3b93-453a-882d-e45211f6a9ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.622065] env[62525]: DEBUG nova.compute.provider_tree [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1638.787053] env[62525]: DEBUG nova.network.neutron [-] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.980981] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1638.980981] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52525143-6ba2-dd02-08da-f9aecd25c97f" [ 1638.980981] env[62525]: _type = "HttpNfcLease" [ 1638.980981] env[62525]: } is ready. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1638.981378] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1638.981378] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52525143-6ba2-dd02-08da-f9aecd25c97f" [ 1638.981378] env[62525]: _type = "HttpNfcLease" [ 1638.981378] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1638.981980] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354c5705-9d67-4e17-a0ee-2b875b391391 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.988965] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a9015a-9c6c-5c93-b382-1dacda271218/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1638.989153] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a9015a-9c6c-5c93-b382-1dacda271218/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1639.083082] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-73997510-4183-477d-985f-44005b0387ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.126030] env[62525]: DEBUG nova.scheduler.client.report [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1639.288726] env[62525]: INFO nova.compute.manager [-] [instance: 1003d1d2-3f2a-4c54-b8de-721a58ef2fd6] Took 1.36 seconds to deallocate network for instance. 
[ 1639.632288] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.632865] env[62525]: DEBUG nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1639.635954] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.792s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.637430] env[62525]: INFO nova.compute.claims [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1639.871119] env[62525]: DEBUG nova.compute.manager [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-changed-befd9821-2180-4d16-b3eb-fb5cd2996595 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1639.871591] env[62525]: DEBUG nova.compute.manager [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Refreshing instance network info cache due to event network-changed-befd9821-2180-4d16-b3eb-fb5cd2996595. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1639.872094] env[62525]: DEBUG oslo_concurrency.lockutils [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] Acquiring lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.872373] env[62525]: DEBUG oslo_concurrency.lockutils [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] Acquired lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.872621] env[62525]: DEBUG nova.network.neutron [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Refreshing network info cache for port befd9821-2180-4d16-b3eb-fb5cd2996595 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1639.964969] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Successfully updated port: 43bbe938-60f4-4e29-9eb3-2eeecc0f7dba {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1640.141847] env[62525]: DEBUG nova.compute.utils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1640.146427] env[62525]: DEBUG nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1640.146574] env[62525]: DEBUG nova.network.neutron [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1640.197122] env[62525]: DEBUG nova.policy [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1640.315133] env[62525]: DEBUG oslo_concurrency.lockutils [None req-defa2bae-e402-4307-99eb-0905e77026c0 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "1003d1d2-3f2a-4c54-b8de-721a58ef2fd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.447s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.414492] env[62525]: DEBUG nova.network.neutron [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1640.480230] env[62525]: DEBUG nova.compute.manager [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-vif-plugged-43bbe938-60f4-4e29-9eb3-2eeecc0f7dba {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1640.480230] env[62525]: DEBUG oslo_concurrency.lockutils [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] Acquiring lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.480230] env[62525]: DEBUG oslo_concurrency.lockutils [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.480230] env[62525]: DEBUG oslo_concurrency.lockutils [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.480230] env[62525]: DEBUG nova.compute.manager [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] No waiting events found dispatching network-vif-plugged-43bbe938-60f4-4e29-9eb3-2eeecc0f7dba {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1640.480230] env[62525]: WARNING nova.compute.manager [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received unexpected event network-vif-plugged-43bbe938-60f4-4e29-9eb3-2eeecc0f7dba for instance with vm_state building and task_state spawning. [ 1640.480619] env[62525]: DEBUG nova.compute.manager [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-changed-43bbe938-60f4-4e29-9eb3-2eeecc0f7dba {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1640.480619] env[62525]: DEBUG nova.compute.manager [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Refreshing instance network info cache due to event network-changed-43bbe938-60f4-4e29-9eb3-2eeecc0f7dba. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1640.480954] env[62525]: DEBUG oslo_concurrency.lockutils [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] Acquiring lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.558896] env[62525]: DEBUG nova.network.neutron [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1640.644944] env[62525]: DEBUG nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1640.723029] env[62525]: DEBUG nova.network.neutron [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Successfully created port: c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1640.930470] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa16f76-1890-47f7-bbf1-ce355f66b32b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.940252] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834c4ff4-12cf-44ae-ba8e-e05c3d892141 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.977524] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d334397-7d5b-4ad6-b491-63be1074edfc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.985547] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb2d04c-f12e-4c6a-bc64-7af92c57d5fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.999818] env[62525]: DEBUG nova.compute.provider_tree [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.063033] env[62525]: DEBUG oslo_concurrency.lockutils [req-71fc3e3a-8bfd-4a4a-a04a-a8a6edcd8e98 req-cf8933cb-201c-4e9d-99a2-5c485bdf81aa service nova] Releasing lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.063239] env[62525]: DEBUG oslo_concurrency.lockutils [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] Acquired lock 
"refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.063386] env[62525]: DEBUG nova.network.neutron [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Refreshing network info cache for port 43bbe938-60f4-4e29-9eb3-2eeecc0f7dba {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.397661] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.398053] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.503962] env[62525]: DEBUG nova.scheduler.client.report [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1641.605083] env[62525]: DEBUG nova.network.neutron [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1641.655175] env[62525]: DEBUG nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1641.682032] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1641.682360] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1641.682572] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1641.682804] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1641.682978] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1641.683193] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1641.683430] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1641.683665] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1641.683862] 
env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1641.684176] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1641.684431] env[62525]: DEBUG nova.virt.hardware [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1641.685464] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2b9260-732d-4152-b82f-b57a717b2245 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.694659] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7454ce7d-a30e-46f1-9263-03d77db95390 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.701050] env[62525]: DEBUG nova.network.neutron [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.900609] env[62525]: DEBUG nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1642.009348] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.010112] env[62525]: DEBUG nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1642.020435] env[62525]: DEBUG nova.compute.manager [req-17d078f2-661b-4c02-bedd-cfebbdbf27c4 req-ace807dd-f3b6-4d7c-8112-42f33ba030ea service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-vif-plugged-aa34451f-78cd-485b-9a6a-4ff6e664707f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1642.020435] env[62525]: DEBUG oslo_concurrency.lockutils [req-17d078f2-661b-4c02-bedd-cfebbdbf27c4 req-ace807dd-f3b6-4d7c-8112-42f33ba030ea service nova] Acquiring lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.020702] env[62525]: DEBUG oslo_concurrency.lockutils [req-17d078f2-661b-4c02-bedd-cfebbdbf27c4 req-ace807dd-f3b6-4d7c-8112-42f33ba030ea service nova] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.020933] env[62525]: DEBUG oslo_concurrency.lockutils [req-17d078f2-661b-4c02-bedd-cfebbdbf27c4 req-ace807dd-f3b6-4d7c-8112-42f33ba030ea service nova] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.021518] env[62525]: DEBUG nova.compute.manager [req-17d078f2-661b-4c02-bedd-cfebbdbf27c4 req-ace807dd-f3b6-4d7c-8112-42f33ba030ea service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] No waiting events found dispatching network-vif-plugged-aa34451f-78cd-485b-9a6a-4ff6e664707f {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1642.021518] env[62525]: WARNING nova.compute.manager [req-17d078f2-661b-4c02-bedd-cfebbdbf27c4 req-ace807dd-f3b6-4d7c-8112-42f33ba030ea service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received unexpected event network-vif-plugged-aa34451f-78cd-485b-9a6a-4ff6e664707f for instance with vm_state building and task_state spawning. 
[ 1642.107060] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Successfully updated port: aa34451f-78cd-485b-9a6a-4ff6e664707f {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1642.212179] env[62525]: DEBUG oslo_concurrency.lockutils [req-83be90af-bfec-4131-a014-965212b4112f req-ea5bad2d-1943-480b-b448-f34646ba3241 service nova] Releasing lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.331902] env[62525]: DEBUG nova.network.neutron [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Successfully updated port: c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1642.426975] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.427309] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.428654] env[62525]: INFO nova.compute.claims [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1642.495985] env[62525]: DEBUG nova.compute.manager [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-vif-plugged-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1642.496243] env[62525]: DEBUG oslo_concurrency.lockutils [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.496476] env[62525]: DEBUG oslo_concurrency.lockutils [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.496642] env[62525]: DEBUG oslo_concurrency.lockutils [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f 
req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.496806] env[62525]: DEBUG nova.compute.manager [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] No waiting events found dispatching network-vif-plugged-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1642.496991] env[62525]: WARNING nova.compute.manager [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received unexpected event network-vif-plugged-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c for instance with vm_state building and task_state spawning. [ 1642.497166] env[62525]: DEBUG nova.compute.manager [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1642.497320] env[62525]: DEBUG nova.compute.manager [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing instance network info cache due to event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1642.497513] env[62525]: DEBUG oslo_concurrency.lockutils [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.497651] env[62525]: DEBUG oslo_concurrency.lockutils [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.497804] env[62525]: DEBUG nova.network.neutron [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1642.519140] env[62525]: DEBUG nova.compute.utils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1642.520599] env[62525]: DEBUG nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1642.520780] env[62525]: DEBUG nova.network.neutron [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1642.574562] env[62525]: DEBUG nova.policy [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd906b2c1755b466991ea7d22fa90df8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e48ef541f0e4b689d5d86782efb8db5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1642.612862] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.612986] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.613125] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1642.835411] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.939379] env[62525]: DEBUG nova.network.neutron [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Successfully created port: 3cc455e7-72d7-4e09-ba23-155358d0b956 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1643.024387] env[62525]: DEBUG nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1643.035410] env[62525]: DEBUG nova.network.neutron [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1643.119748] env[62525]: DEBUG nova.network.neutron [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.155993] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1643.585228] env[62525]: DEBUG nova.network.neutron [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Updating instance_info_cache with network_info: [{"id": "befd9821-2180-4d16-b3eb-fb5cd2996595", "address": "fa:16:3e:1e:70:92", "network": {"id": "021813f6-a7de-4dd5-85ec-8dab19be9697", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1956489088", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbefd9821-21", "ovs_interfaceid": "befd9821-2180-4d16-b3eb-fb5cd2996595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "43bbe938-60f4-4e29-9eb3-2eeecc0f7dba", "address": "fa:16:3e:88:cd:cc", "network": {"id": "46d7ccb6-f69e-4158-8b6e-05bf7b2d4265", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-267750613", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43bbe938-60", "ovs_interfaceid": 
"43bbe938-60f4-4e29-9eb3-2eeecc0f7dba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa34451f-78cd-485b-9a6a-4ff6e664707f", "address": "fa:16:3e:ea:95:d7", "network": {"id": "021813f6-a7de-4dd5-85ec-8dab19be9697", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1956489088", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa34451f-78", "ovs_interfaceid": "aa34451f-78cd-485b-9a6a-4ff6e664707f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.622995] env[62525]: DEBUG oslo_concurrency.lockutils [req-7468226a-5a38-4c94-bfb5-deb0d7b25e1f req-31880f70-c75c-4a76-9a4e-3bbacb4260b1 service nova] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.623470] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.623686] env[62525]: DEBUG nova.network.neutron [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1643.683073] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36735cec-7258-49eb-8983-c4df54cda58d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.692802] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78512c6-2fd0-43b2-b6fe-84c9f637ee4e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.726616] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daec934b-399a-4c2b-b88b-bdbc0a6f41ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.735132] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649dfd27-680a-4309-9dc7-81c8d6adedf9 
{{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.748754] env[62525]: DEBUG nova.compute.provider_tree [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1644.038885] env[62525]: DEBUG nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1644.050477] env[62525]: DEBUG nova.compute.manager [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-changed-aa34451f-78cd-485b-9a6a-4ff6e664707f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1644.050697] env[62525]: DEBUG nova.compute.manager [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Refreshing instance network info cache due to event network-changed-aa34451f-78cd-485b-9a6a-4ff6e664707f. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1644.050920] env[62525]: DEBUG oslo_concurrency.lockutils [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] Acquiring lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.065030] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1644.065292] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1644.065450] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 
tempest-DeleteServersAdminTestJSON-861260245-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1644.065629] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1644.065773] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1644.065917] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1644.066148] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1644.066308] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1644.066473] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1644.066634] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1644.066804] env[62525]: DEBUG nova.virt.hardware [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1644.067716] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cdaaba-ad31-4688-9951-82892883426a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.076709] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c71764-3c58-42ad-b596-2afa34934a7d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.090743] env[62525]: DEBUG 
oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Releasing lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.091179] env[62525]: DEBUG nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Instance network_info: |[{"id": "befd9821-2180-4d16-b3eb-fb5cd2996595", "address": "fa:16:3e:1e:70:92", "network": {"id": "021813f6-a7de-4dd5-85ec-8dab19be9697", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1956489088", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbefd9821-21", "ovs_interfaceid": "befd9821-2180-4d16-b3eb-fb5cd2996595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "43bbe938-60f4-4e29-9eb3-2eeecc0f7dba", "address": "fa:16:3e:88:cd:cc", "network": {"id": "46d7ccb6-f69e-4158-8b6e-05bf7b2d4265", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-267750613", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43bbe938-60", "ovs_interfaceid": "43bbe938-60f4-4e29-9eb3-2eeecc0f7dba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa34451f-78cd-485b-9a6a-4ff6e664707f", "address": "fa:16:3e:ea:95:d7", "network": {"id": "021813f6-a7de-4dd5-85ec-8dab19be9697", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1956489088", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa34451f-78", "ovs_interfaceid": "aa34451f-78cd-485b-9a6a-4ff6e664707f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1644.091663] env[62525]: DEBUG oslo_concurrency.lockutils [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] Acquired lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.091842] env[62525]: DEBUG nova.network.neutron [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Refreshing network info cache for port aa34451f-78cd-485b-9a6a-4ff6e664707f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1644.093267] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:70:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'befd9821-2180-4d16-b3eb-fb5cd2996595', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:cd:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a8f5363-be3a-4f92-9ccf-33bb0c8113b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43bbe938-60f4-4e29-9eb3-2eeecc0f7dba', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:95:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22390021-1742-415d-b442-811550d09927', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa34451f-78cd-485b-9a6a-4ff6e664707f', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1644.104130] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Creating folder: Project (aeeaeb287b194ebfb0c57e33ef138187). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1644.105175] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b70747eb-b9cd-425e-af3f-f5dda4f31ad4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.117149] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Created folder: Project (aeeaeb287b194ebfb0c57e33ef138187) in parent group-v369553. 
[ 1644.117340] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Creating folder: Instances. Parent ref: group-v369781. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1644.117561] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b55e5143-89ee-4186-888b-4836970a25e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.128950] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Created folder: Instances in parent group-v369781. [ 1644.129192] env[62525]: DEBUG oslo.service.loopingcall [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.129732] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1644.129942] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-811714a9-8f66-43cc-9fc2-03028defdb85 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.154971] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1644.154971] env[62525]: value = "task-1781647" [ 1644.154971] env[62525]: _type = "Task" [ 1644.154971] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.159065] env[62525]: DEBUG nova.network.neutron [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1644.166237] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781647, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.252379] env[62525]: DEBUG nova.scheduler.client.report [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1644.319036] env[62525]: DEBUG nova.network.neutron [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.664807] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781647, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.667343] env[62525]: DEBUG nova.compute.manager [req-80246f5f-835d-466d-86ef-c27e81b91dd3 req-67f88e8b-505a-41d3-b9d1-0b653578fe8f service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Received event network-vif-plugged-3cc455e7-72d7-4e09-ba23-155358d0b956 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1644.667776] env[62525]: DEBUG oslo_concurrency.lockutils [req-80246f5f-835d-466d-86ef-c27e81b91dd3 req-67f88e8b-505a-41d3-b9d1-0b653578fe8f service nova] Acquiring lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.668088] env[62525]: DEBUG oslo_concurrency.lockutils [req-80246f5f-835d-466d-86ef-c27e81b91dd3 req-67f88e8b-505a-41d3-b9d1-0b653578fe8f service nova] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.668333] env[62525]: DEBUG oslo_concurrency.lockutils [req-80246f5f-835d-466d-86ef-c27e81b91dd3 req-67f88e8b-505a-41d3-b9d1-0b653578fe8f service nova] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.668579] env[62525]: DEBUG nova.compute.manager [req-80246f5f-835d-466d-86ef-c27e81b91dd3 req-67f88e8b-505a-41d3-b9d1-0b653578fe8f service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] No waiting events found dispatching network-vif-plugged-3cc455e7-72d7-4e09-ba23-155358d0b956 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1644.668813] env[62525]: WARNING nova.compute.manager [req-80246f5f-835d-466d-86ef-c27e81b91dd3 req-67f88e8b-505a-41d3-b9d1-0b653578fe8f service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Received unexpected event network-vif-plugged-3cc455e7-72d7-4e09-ba23-155358d0b956 for instance with vm_state building and task_state spawning. [ 1644.757041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.757561] env[62525]: DEBUG nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1644.790025] env[62525]: DEBUG nova.network.neutron [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Successfully updated port: 3cc455e7-72d7-4e09-ba23-155358d0b956 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1644.822062] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.822435] env[62525]: DEBUG nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Instance network_info: |[{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1644.823282] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:79:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '669e4919-e0ad-4e23-9f23-4c5f2be0d858', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1644.831358] env[62525]: DEBUG oslo.service.loopingcall [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.831982] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1644.832376] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68bc80fd-e794-47c7-8429-2ea0731281ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.853404] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1644.853404] env[62525]: value = "task-1781648" [ 1644.853404] env[62525]: _type = "Task" [ 1644.853404] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.854428] env[62525]: DEBUG nova.network.neutron [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Updated VIF entry in instance network info cache for port aa34451f-78cd-485b-9a6a-4ff6e664707f. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1644.854965] env[62525]: DEBUG nova.network.neutron [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Updating instance_info_cache with network_info: [{"id": "befd9821-2180-4d16-b3eb-fb5cd2996595", "address": "fa:16:3e:1e:70:92", "network": {"id": "021813f6-a7de-4dd5-85ec-8dab19be9697", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1956489088", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbefd9821-21", "ovs_interfaceid": "befd9821-2180-4d16-b3eb-fb5cd2996595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "43bbe938-60f4-4e29-9eb3-2eeecc0f7dba", "address": "fa:16:3e:88:cd:cc", "network": {"id": "46d7ccb6-f69e-4158-8b6e-05bf7b2d4265", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-267750613", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": 
"nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43bbe938-60", "ovs_interfaceid": "43bbe938-60f4-4e29-9eb3-2eeecc0f7dba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa34451f-78cd-485b-9a6a-4ff6e664707f", "address": "fa:16:3e:ea:95:d7", "network": {"id": "021813f6-a7de-4dd5-85ec-8dab19be9697", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1956489088", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22390021-1742-415d-b442-811550d09927", "external-id": "nsx-vlan-transportzone-347", "segmentation_id": 347, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa34451f-78", "ovs_interfaceid": "aa34451f-78cd-485b-9a6a-4ff6e664707f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.866258] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781648, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.164892] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781647, 'name': CreateVM_Task, 'duration_secs': 0.852172} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.165142] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1645.166095] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.166268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.166593] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1645.166850] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6db4ab8-1228-45f2-bc17-74ee22c3f77c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.172126] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1645.172126] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b741e6-8c2a-4587-b3ce-090920f89bff" [ 1645.172126] env[62525]: _type = "Task" [ 1645.172126] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.180480] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b741e6-8c2a-4587-b3ce-090920f89bff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.263244] env[62525]: DEBUG nova.compute.utils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1645.264892] env[62525]: DEBUG nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1645.265085] env[62525]: DEBUG nova.network.neutron [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1645.295223] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "refresh_cache-3ef2dbbe-0cf3-4098-91d8-e206a872bd08" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.295357] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired lock "refresh_cache-3ef2dbbe-0cf3-4098-91d8-e206a872bd08" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.295510] env[62525]: DEBUG nova.network.neutron [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1645.311603] env[62525]: DEBUG nova.policy [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98769d9ddf744118910ce61bcf47f145', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c87f1997d5c4739850790da5dd969fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1645.360898] env[62525]: DEBUG oslo_concurrency.lockutils [req-91485f5a-7f98-49ad-ae5b-f7bb718a84c0 req-ed3a3085-9dd7-4a51-8083-bb4c2e167a4b service nova] Releasing lock "refresh_cache-4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.365819] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781648, 'name': CreateVM_Task, 'duration_secs': 0.371642} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.365980] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1645.366678] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.567389] env[62525]: DEBUG nova.network.neutron [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Successfully created port: 85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1645.684906] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b741e6-8c2a-4587-b3ce-090920f89bff, 'name': SearchDatastore_Task, 'duration_secs': 0.011135} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.685321] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.685530] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1645.685768] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.685907] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.686090] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1645.686380] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.686681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1645.686925] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cb10bea-6e5e-480a-a207-32154081cd60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.688813] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9583996c-7c36-44d8-849c-8416a0888f03 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.694512] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1645.694512] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52563f83-e3d3-2176-3b96-5bb58dbdf6d8" [ 1645.694512] env[62525]: _type = "Task" [ 1645.694512] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.698846] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1645.699035] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1645.700215] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d54d9461-b5b2-49f5-b456-21cd2b2b940e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.705756] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52563f83-e3d3-2176-3b96-5bb58dbdf6d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.709009] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1645.709009] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d55abc-9b94-083c-717a-d51c9ae502fb" [ 1645.709009] env[62525]: _type = "Task" [ 1645.709009] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.717583] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d55abc-9b94-083c-717a-d51c9ae502fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.768476] env[62525]: DEBUG nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1645.845534] env[62525]: DEBUG nova.network.neutron [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1646.064039] env[62525]: DEBUG nova.network.neutron [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Updating instance_info_cache with network_info: [{"id": "3cc455e7-72d7-4e09-ba23-155358d0b956", "address": "fa:16:3e:26:06:55", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cc455e7-72", "ovs_interfaceid": "3cc455e7-72d7-4e09-ba23-155358d0b956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.209651] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 
tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52563f83-e3d3-2176-3b96-5bb58dbdf6d8, 'name': SearchDatastore_Task, 'duration_secs': 0.010488} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.210221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.210574] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1646.210899] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.224694] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d55abc-9b94-083c-717a-d51c9ae502fb, 'name': SearchDatastore_Task, 'duration_secs': 0.008941} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.225877] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ffb881-6418-4748-836f-39ac9e51b0ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.236915] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1646.236915] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e6a738-fa67-8ae9-a537-648fe86b8196" [ 1646.236915] env[62525]: _type = "Task" [ 1646.236915] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.244406] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e6a738-fa67-8ae9-a537-648fe86b8196, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.567202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Releasing lock "refresh_cache-3ef2dbbe-0cf3-4098-91d8-e206a872bd08" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.567576] env[62525]: DEBUG nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Instance network_info: |[{"id": "3cc455e7-72d7-4e09-ba23-155358d0b956", "address": "fa:16:3e:26:06:55", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cc455e7-72", "ovs_interfaceid": "3cc455e7-72d7-4e09-ba23-155358d0b956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1646.567986] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:06:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cc455e7-72d7-4e09-ba23-155358d0b956', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1646.575798] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Creating folder: Project (7e48ef541f0e4b689d5d86782efb8db5). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1646.576474] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2daef61-fd1f-4d7a-8fbb-f377115ffec0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.588783] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Created folder: Project (7e48ef541f0e4b689d5d86782efb8db5) in parent group-v369553. [ 1646.589052] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Creating folder: Instances. Parent ref: group-v369785. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1646.589415] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bb39138-8429-4485-8d29-8a6dd2d1ba39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.600486] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Created folder: Instances in parent group-v369785. [ 1646.600726] env[62525]: DEBUG oslo.service.loopingcall [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1646.600924] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1646.601166] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8998d92e-599f-44a1-b5fe-ae43219066ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.622107] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1646.622107] env[62525]: value = "task-1781651" [ 1646.622107] env[62525]: _type = "Task" [ 1646.622107] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.630965] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781651, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.701231] env[62525]: DEBUG nova.compute.manager [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Received event network-changed-3cc455e7-72d7-4e09-ba23-155358d0b956 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1646.701534] env[62525]: DEBUG nova.compute.manager [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Refreshing instance network info cache due to event network-changed-3cc455e7-72d7-4e09-ba23-155358d0b956. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1646.701671] env[62525]: DEBUG oslo_concurrency.lockutils [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] Acquiring lock "refresh_cache-3ef2dbbe-0cf3-4098-91d8-e206a872bd08" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.701858] env[62525]: DEBUG oslo_concurrency.lockutils [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] Acquired lock "refresh_cache-3ef2dbbe-0cf3-4098-91d8-e206a872bd08" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.701975] env[62525]: DEBUG nova.network.neutron [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Refreshing network info cache for port 3cc455e7-72d7-4e09-ba23-155358d0b956 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1646.746328] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e6a738-fa67-8ae9-a537-648fe86b8196, 'name': SearchDatastore_Task, 'duration_secs': 0.020494} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.746635] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.746909] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8/4e52e21e-4db3-45e5-b88d-455d1b8ea5c8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1646.747221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.747432] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1646.747646] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae99041e-9d58-4315-a86b-c913a2f5eb92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.749695] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71e2ca80-b7f7-4d92-9425-47ab91f62b6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.759858] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1646.759858] env[62525]: value = "task-1781652" [ 1646.759858] env[62525]: _type = "Task" [ 1646.759858] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.760909] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1646.761094] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1646.764760] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acb9b5c5-886b-4f4d-963b-f0b6ae4e579b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.770481] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1646.770481] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f1a2c1-bf67-8be4-0ac8-794e3253a8d8" [ 1646.770481] env[62525]: _type = "Task" [ 1646.770481] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.774703] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.778673] env[62525]: DEBUG nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1646.786563] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f1a2c1-bf67-8be4-0ac8-794e3253a8d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.809369] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1646.809740] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1646.809740] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1646.809926] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1646.810079] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1646.810228] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1646.810439] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1646.810596] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1646.810763] env[62525]: DEBUG nova.virt.hardware [None 
req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1646.810922] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1646.811105] env[62525]: DEBUG nova.virt.hardware [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1646.811982] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd5500e-ffb8-47eb-a4ad-637cc8829563 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.820101] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e5acde-e6e7-43c9-912a-8852fc627693 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.032283] env[62525]: DEBUG nova.compute.manager [req-cdfe71a8-5d5a-44e1-b946-237348108959 req-b7b707bc-3b0b-4089-a24a-57dcd5a1d483 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Received event network-vif-plugged-85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1647.032674] env[62525]: DEBUG oslo_concurrency.lockutils [req-cdfe71a8-5d5a-44e1-b946-237348108959 req-b7b707bc-3b0b-4089-a24a-57dcd5a1d483 service nova] Acquiring lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.032910] env[62525]: DEBUG oslo_concurrency.lockutils [req-cdfe71a8-5d5a-44e1-b946-237348108959 req-b7b707bc-3b0b-4089-a24a-57dcd5a1d483 service nova] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.033547] env[62525]: DEBUG oslo_concurrency.lockutils [req-cdfe71a8-5d5a-44e1-b946-237348108959 req-b7b707bc-3b0b-4089-a24a-57dcd5a1d483 service nova] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.033745] env[62525]: DEBUG nova.compute.manager [req-cdfe71a8-5d5a-44e1-b946-237348108959 req-b7b707bc-3b0b-4089-a24a-57dcd5a1d483 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] No waiting events found dispatching network-vif-plugged-85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1647.034037] env[62525]: WARNING nova.compute.manager [req-cdfe71a8-5d5a-44e1-b946-237348108959 
req-b7b707bc-3b0b-4089-a24a-57dcd5a1d483 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Received unexpected event network-vif-plugged-85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 for instance with vm_state building and task_state spawning. [ 1647.134460] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781651, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.135438] env[62525]: DEBUG nova.network.neutron [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Successfully updated port: 85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1647.272280] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781652, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.286235] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f1a2c1-bf67-8be4-0ac8-794e3253a8d8, 'name': SearchDatastore_Task, 'duration_secs': 0.028006} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.287406] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41dc5aeb-9d41-4eae-ba7e-5b5756d5fd02 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.293310] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1647.293310] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520ea431-5d76-1033-fe73-c769d1247937" [ 1647.293310] env[62525]: _type = "Task" [ 1647.293310] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.304272] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520ea431-5d76-1033-fe73-c769d1247937, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.357057] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a9015a-9c6c-5c93-b382-1dacda271218/disk-0.vmdk. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1647.358081] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55971a01-364a-4723-bdc3-fdf6416b4a40 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.364917] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a9015a-9c6c-5c93-b382-1dacda271218/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1647.365094] env[62525]: ERROR oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a9015a-9c6c-5c93-b382-1dacda271218/disk-0.vmdk due to incomplete transfer. [ 1647.365338] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-df7d993d-b24b-4eb9-a756-cf8e8664b988 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.374822] env[62525]: DEBUG oslo_vmware.rw_handles [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a9015a-9c6c-5c93-b382-1dacda271218/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1647.374934] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Uploaded image 02017241-feb5-466c-b885-b516496197f8 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1647.377422] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1647.377723] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f0972fe3-3088-4983-bca7-28fc3a2b7ab5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.385386] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1647.385386] env[62525]: value = "task-1781653" [ 1647.385386] env[62525]: _type = "Task" [ 1647.385386] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.394912] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781653, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.437091] env[62525]: DEBUG nova.network.neutron [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Updated VIF entry in instance network info cache for port 3cc455e7-72d7-4e09-ba23-155358d0b956. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1647.437515] env[62525]: DEBUG nova.network.neutron [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Updating instance_info_cache with network_info: [{"id": "3cc455e7-72d7-4e09-ba23-155358d0b956", "address": "fa:16:3e:26:06:55", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cc455e7-72", "ovs_interfaceid": "3cc455e7-72d7-4e09-ba23-155358d0b956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.633455] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781651, 'name': CreateVM_Task, 'duration_secs': 0.729735} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.633633] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1647.634371] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.635214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.635214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1647.635214] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3c417cb-ef03-4758-a8bb-107909c368bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.637712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.637848] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.637991] env[62525]: DEBUG nova.network.neutron [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1647.640407] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1647.640407] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5243f641-46a1-815d-d8bf-54997a5e5d07" [ 1647.640407] env[62525]: _type = "Task" [ 1647.640407] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.656268] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5243f641-46a1-815d-d8bf-54997a5e5d07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.771268] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781652, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.905864} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.771594] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8/4e52e21e-4db3-45e5-b88d-455d1b8ea5c8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1647.771840] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1647.772156] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca834ce6-4ac6-4056-abcf-466318516657 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.779587] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1647.779587] env[62525]: value = "task-1781654" [ 1647.779587] env[62525]: _type = "Task" [ 1647.779587] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.787674] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.802924] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520ea431-5d76-1033-fe73-c769d1247937, 'name': SearchDatastore_Task, 'duration_secs': 0.105406} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.803187] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.803443] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4/c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1647.803689] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62234d66-ba72-4531-adf2-528dcf66b289 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.810528] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1647.810528] env[62525]: value = "task-1781655" [ 1647.810528] env[62525]: _type = "Task" [ 1647.810528] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.818223] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.897743] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781653, 'name': Destroy_Task} progress is 33%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.941213] env[62525]: DEBUG oslo_concurrency.lockutils [req-3a566cb7-3ec5-4b8a-9a53-5b78f30e74b1 req-d20dc9f4-3f0b-4292-902c-2f79174908f4 service nova] Releasing lock "refresh_cache-3ef2dbbe-0cf3-4098-91d8-e206a872bd08" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.154391] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.154924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.154924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.155239] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.155339] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.157097] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5243f641-46a1-815d-d8bf-54997a5e5d07, 'name': SearchDatastore_Task, 'duration_secs': 0.014127} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.157421] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.157679] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1648.157939] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.158122] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.158321] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1648.158609] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fccded48-b6b8-4946-8e1f-45f91e8bcefd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.161709] env[62525]: INFO nova.compute.manager [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Terminating instance [ 1648.164719] env[62525]: DEBUG nova.compute.manager [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1648.165382] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1648.166236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399a10eb-b432-4523-892c-70ed754f02cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.175878] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1648.176403] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5f6ead5-0130-4855-a1fa-c6866f639df1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.178298] env[62525]: DEBUG nova.network.neutron [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1648.185674] env[62525]: DEBUG oslo_vmware.api [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1648.185674] env[62525]: value = "task-1781656" [ 1648.185674] env[62525]: _type = "Task" [ 1648.185674] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.186891] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1648.187093] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1648.190730] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58972e10-6bd9-4395-aad3-07602931e8e3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.201278] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1648.201278] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e29b9d-4b35-9747-201f-589f03da84ad" [ 1648.201278] env[62525]: _type = "Task" [ 1648.201278] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.201577] env[62525]: DEBUG oslo_vmware.api [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.212475] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e29b9d-4b35-9747-201f-589f03da84ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.290162] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081227} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.290480] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1648.291354] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e26872d-6cee-4115-954b-73ab2a5c76bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.321946] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8/4e52e21e-4db3-45e5-b88d-455d1b8ea5c8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1648.325361] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-107c7297-a33a-4893-8b73-c2416c129531 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.346196] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781655, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.347682] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1648.347682] env[62525]: value = "task-1781657" [ 1648.347682] env[62525]: _type = "Task" [ 1648.347682] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.351630] env[62525]: DEBUG nova.network.neutron [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Updating instance_info_cache with network_info: [{"id": "85a6973b-d6f5-4c81-b449-8ab01aa9a0f3", "address": "fa:16:3e:99:b0:c2", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85a6973b-d6", "ovs_interfaceid": "85a6973b-d6f5-4c81-b449-8ab01aa9a0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.358572] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781657, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.398267] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781653, 'name': Destroy_Task, 'duration_secs': 0.832718} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.398554] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Destroyed the VM [ 1648.398813] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1648.399094] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7d2db52e-2fcb-42bd-8044-212b2d6ae3f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.407071] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1648.407071] env[62525]: value = "task-1781658" [ 1648.407071] env[62525]: _type = "Task" [ 1648.407071] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.416858] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781658, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.697758] env[62525]: DEBUG oslo_vmware.api [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781656, 'name': PowerOffVM_Task, 'duration_secs': 0.253604} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.698037] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1648.698257] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1648.698631] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68a3c16d-3612-4bda-851b-fee509d8124f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.710952] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e29b9d-4b35-9747-201f-589f03da84ad, 'name': SearchDatastore_Task, 'duration_secs': 0.08523} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.711722] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0e6fa87-30b4-413c-84ca-c03a697da6ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.716819] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1648.716819] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d08f41-4861-843b-8359-400a77e5ab98" [ 1648.716819] env[62525]: _type = "Task" [ 1648.716819] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.724172] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d08f41-4861-843b-8359-400a77e5ab98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.788348] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1648.788740] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1648.788740] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Deleting the datastore file [datastore1] 7a92bac8-9cee-41ed-81e3-08b48432fe7c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1648.789654] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5e368ad-3238-493d-b195-a900d0da4645 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.795407] env[62525]: DEBUG oslo_vmware.api [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for the task: (returnval){ [ 1648.795407] env[62525]: value = "task-1781660" [ 1648.795407] env[62525]: _type = "Task" [ 1648.795407] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.803818] env[62525]: DEBUG oslo_vmware.api [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781660, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.821360] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781655, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663355} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.821628] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4/c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1648.821835] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1648.822129] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-397e286c-a77e-4929-95b5-38889b97e3b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.828589] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1648.828589] env[62525]: value = "task-1781661" [ 1648.828589] env[62525]: _type = "Task" [ 1648.828589] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.836190] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781661, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.857562] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.857906] env[62525]: DEBUG nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Instance network_info: |[{"id": "85a6973b-d6f5-4c81-b449-8ab01aa9a0f3", "address": "fa:16:3e:99:b0:c2", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85a6973b-d6", "ovs_interfaceid": "85a6973b-d6f5-4c81-b449-8ab01aa9a0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1648.858243] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781657, 'name': ReconfigVM_Task, 'duration_secs': 0.316299} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.858629] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:b0:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85a6973b-d6f5-4c81-b449-8ab01aa9a0f3', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1648.866805] env[62525]: DEBUG oslo.service.loopingcall [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1648.867098] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8/4e52e21e-4db3-45e5-b88d-455d1b8ea5c8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1648.867760] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1648.867959] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-091c86dc-8085-4f21-927d-f1333b0e55bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.869720] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e6ceecb-1579-4e40-b507-5141a92044b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.890431] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1648.890431] env[62525]: value = "task-1781663" [ 1648.890431] env[62525]: _type = "Task" [ 1648.890431] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.891728] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1648.891728] env[62525]: value = "task-1781662" [ 1648.891728] env[62525]: _type = "Task" [ 1648.891728] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.902460] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781663, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.905451] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781662, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.915574] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781658, 'name': RemoveSnapshot_Task, 'duration_secs': 0.433045} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.915817] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1648.916084] env[62525]: DEBUG nova.compute.manager [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1648.916793] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0258e848-348d-4740-bfb6-0f7884a68cc5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.067261] env[62525]: DEBUG nova.compute.manager [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Received event network-changed-85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1649.068058] env[62525]: DEBUG nova.compute.manager [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Refreshing instance network info cache due to event network-changed-85a6973b-d6f5-4c81-b449-8ab01aa9a0f3. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1649.068671] env[62525]: DEBUG oslo_concurrency.lockutils [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] Acquiring lock "refresh_cache-2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.069039] env[62525]: DEBUG oslo_concurrency.lockutils [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] Acquired lock "refresh_cache-2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.069435] env[62525]: DEBUG nova.network.neutron [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Refreshing network info cache for port 85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1649.229188] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d08f41-4861-843b-8359-400a77e5ab98, 'name': SearchDatastore_Task, 'duration_secs': 0.011462} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.229677] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.230320] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3ef2dbbe-0cf3-4098-91d8-e206a872bd08/3ef2dbbe-0cf3-4098-91d8-e206a872bd08.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1649.230841] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbb166dd-0e96-48e0-8eb6-446f213cf8b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.240160] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1649.240160] env[62525]: value = "task-1781664" [ 1649.240160] env[62525]: _type = "Task" [ 1649.240160] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.247809] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781664, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.312067] env[62525]: DEBUG oslo_vmware.api [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Task: {'id': task-1781660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20364} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.312266] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1649.313035] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1649.313035] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1649.313035] env[62525]: INFO nova.compute.manager [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1649.313227] env[62525]: DEBUG oslo.service.loopingcall [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1649.313421] env[62525]: DEBUG nova.compute.manager [-] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1649.313516] env[62525]: DEBUG nova.network.neutron [-] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1649.339190] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781661, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083446} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.339311] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1649.340451] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43a2ef1-f464-400a-ad54-104de4fc0589 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.363545] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4/c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1649.363841] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-665149c8-e46f-468e-834f-41246586949e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.385587] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1649.385587] env[62525]: value = "task-1781665" [ 1649.385587] env[62525]: _type = "Task" [ 1649.385587] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.394359] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781665, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.406158] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781663, 'name': CreateVM_Task, 'duration_secs': 0.50643} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.409184] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1649.409510] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781662, 'name': Rename_Task, 'duration_secs': 0.185926} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.410182] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.410357] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.410713] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1649.411015] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1649.411262] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21cc1604-b4c3-4645-a46c-eda4d432718d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.412956] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91b5b7a9-0d8e-4ef0-adba-b56e54cd6e67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.417507] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1649.417507] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5234d914-3e8d-2f05-9d79-737db7e88731" [ 1649.417507] env[62525]: _type = "Task" [ 1649.417507] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.421791] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1649.421791] env[62525]: value = "task-1781666" [ 1649.421791] env[62525]: _type = "Task" [ 1649.421791] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.430404] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5234d914-3e8d-2f05-9d79-737db7e88731, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.430880] env[62525]: INFO nova.compute.manager [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Shelve offloading [ 1649.432907] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1649.432992] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cd04e0c-8648-4b6d-913e-f97f17e3f53e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.437989] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781666, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.442575] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1649.442575] env[62525]: value = "task-1781667" [ 1649.442575] env[62525]: _type = "Task" [ 1649.442575] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.450154] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.751892] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781664, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.895711] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781665, 'name': ReconfigVM_Task, 'duration_secs': 0.355421} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.895711] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Reconfigured VM instance instance-00000050 to attach disk [datastore1] c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4/c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1649.896870] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-909eed92-ea41-4e44-9ff1-086f2084f349 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.902918] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1649.902918] env[62525]: value = "task-1781668" [ 1649.902918] env[62525]: _type = "Task" [ 1649.902918] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.912061] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781668, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.915924] env[62525]: DEBUG nova.network.neutron [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Updated VIF entry in instance network info cache for port 85a6973b-d6f5-4c81-b449-8ab01aa9a0f3. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1649.916279] env[62525]: DEBUG nova.network.neutron [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Updating instance_info_cache with network_info: [{"id": "85a6973b-d6f5-4c81-b449-8ab01aa9a0f3", "address": "fa:16:3e:99:b0:c2", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85a6973b-d6", "ovs_interfaceid": "85a6973b-d6f5-4c81-b449-8ab01aa9a0f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.926909] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5234d914-3e8d-2f05-9d79-737db7e88731, 'name': SearchDatastore_Task, 'duration_secs': 0.011924} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.930740] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.931015] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1649.931263] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.931501] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.931584] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1649.932214] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-836a8e42-d849-491a-b975-4e16e647d68d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.939869] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781666, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.941806] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1649.941994] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1649.942757] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9541bd59-5603-4c9c-ac15-f34132bc3b23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.952690] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1649.952690] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e38e2e-e3f5-77d3-5fbd-14fbfeec1012" [ 1649.952690] env[62525]: _type = "Task" [ 1649.952690] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.960901] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1649.961202] env[62525]: DEBUG nova.compute.manager [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1649.961935] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80081846-1b9d-4c4f-995a-721883c1b599 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.971684] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e38e2e-e3f5-77d3-5fbd-14fbfeec1012, 'name': SearchDatastore_Task, 'duration_secs': 0.012046} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.971994] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.972164] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.972344] env[62525]: DEBUG nova.network.neutron [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1649.974360] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0b9174a-a6c7-41c7-92e5-1c42b8101777 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.979858] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1649.979858] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528e188b-e117-7f33-92b5-5798cf4382f2" [ 1649.979858] env[62525]: _type = "Task" [ 1649.979858] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.989367] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528e188b-e117-7f33-92b5-5798cf4382f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.248749] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781664, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619346} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.249012] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3ef2dbbe-0cf3-4098-91d8-e206a872bd08/3ef2dbbe-0cf3-4098-91d8-e206a872bd08.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1650.249224] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1650.249456] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ec07dc1-200e-4df2-921b-18815e376585 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.256832] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1650.256832] env[62525]: value = "task-1781669" [ 1650.256832] env[62525]: _type = "Task" [ 1650.256832] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.264499] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781669, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.323025] env[62525]: DEBUG nova.network.neutron [-] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.415376] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781668, 'name': Rename_Task, 'duration_secs': 0.214996} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.415376] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1650.415376] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cae38183-a029-4840-9d72-3566db34793a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.419605] env[62525]: DEBUG oslo_concurrency.lockutils [req-01e8cc40-8df2-4d84-b66f-f564d1e917ea req-323bf197-b469-40c1-9c24-da8590dc0c73 service nova] Releasing lock "refresh_cache-2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.420442] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1650.420442] env[62525]: value = "task-1781670" [ 1650.420442] env[62525]: _type = "Task" [ 1650.420442] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.428847] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.436129] env[62525]: DEBUG oslo_vmware.api [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781666, 'name': PowerOnVM_Task, 'duration_secs': 0.573513} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.436388] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1650.436590] env[62525]: INFO nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Took 13.36 seconds to spawn the instance on the hypervisor. 
[ 1650.436765] env[62525]: DEBUG nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1650.438295] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314b82fa-de6f-4e00-bf50-02be2e627cc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.494357] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528e188b-e117-7f33-92b5-5798cf4382f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010246} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.494606] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.494922] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9/2ad723ff-6540-4bb4-b09e-52e6a9fb12b9.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1650.495892] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61792ec0-e524-4179-a7d7-2b41e8acfdfe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.505108] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1650.505108] env[62525]: value = "task-1781671" [ 1650.505108] env[62525]: _type = "Task" [ 1650.505108] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.514265] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781671, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.727038] env[62525]: DEBUG nova.network.neutron [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [{"id": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "address": "fa:16:3e:94:5d:a4", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc247c87-0d", "ovs_interfaceid": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.767668] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097346} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.767950] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1650.768778] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f8ec57-416b-4e1f-b3d4-b20180e9f2ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.792515] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 3ef2dbbe-0cf3-4098-91d8-e206a872bd08/3ef2dbbe-0cf3-4098-91d8-e206a872bd08.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1650.792847] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a2e2222-adae-45e0-82ed-e1c05f473292 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.814303] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1650.814303] env[62525]: value = "task-1781672" [ 1650.814303] env[62525]: _type = "Task" [ 1650.814303] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.825787] env[62525]: INFO nova.compute.manager [-] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Took 1.51 seconds to deallocate network for instance. [ 1650.826123] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781672, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.932526] env[62525]: DEBUG oslo_vmware.api [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781670, 'name': PowerOnVM_Task, 'duration_secs': 0.505582} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.932883] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1650.933115] env[62525]: INFO nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Took 9.28 seconds to spawn the instance on the hypervisor. [ 1650.933317] env[62525]: DEBUG nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1650.934190] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2590d23-4077-4844-8942-24d95e245363 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.955212] env[62525]: INFO nova.compute.manager [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Took 18.86 seconds to build instance. [ 1651.019445] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781671, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499391} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.019663] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9/2ad723ff-6540-4bb4-b09e-52e6a9fb12b9.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1651.019878] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1651.020144] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f2824b1-1359-4d51-bf42-478e416884b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.028390] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1651.028390] env[62525]: value = "task-1781673" [ 1651.028390] env[62525]: _type = "Task" [ 1651.028390] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.037927] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781673, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.092107] env[62525]: DEBUG nova.compute.manager [req-0caceaa8-0040-476a-b969-baefe861aabe req-a325e63b-763e-4b4f-8fef-c1a344193c0e service nova] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Received event network-vif-deleted-e95ca310-933c-4095-a25b-170fc26750e5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1651.230447] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.326068] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781672, 'name': ReconfigVM_Task, 'duration_secs': 0.424838} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.326263] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 3ef2dbbe-0cf3-4098-91d8-e206a872bd08/3ef2dbbe-0cf3-4098-91d8-e206a872bd08.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1651.326864] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a536538-35b4-494d-8a58-22ccb7872bcc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.332682] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.332998] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.333245] env[62525]: DEBUG nova.objects.instance [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lazy-loading 'resources' on Instance uuid 7a92bac8-9cee-41ed-81e3-08b48432fe7c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1651.335373] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1651.335373] env[62525]: value = "task-1781674" [ 1651.335373] env[62525]: _type = "Task" [ 1651.335373] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.344060] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781674, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.453925] env[62525]: INFO nova.compute.manager [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Took 16.58 seconds to build instance. 
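The "Acquiring lock \"compute_resources\"" / "acquired ... waited 0.000s" pairs above (and the later "released ... held" lines) are emitted by oslo.concurrency's lock helpers around the resource tracker. A minimal sketch of that pattern is below; the lock name comes from the log, but the function signature and body are placeholders, and Nova itself reaches lockutils through its own wrapper rather than decorating directly like this.

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def update_usage(context, instance, nodename):
        # Critical section: only one thread mutates resource-tracker state at a
        # time; the "waited"/"held" timings in the log measure entry/exit here.
        pass

    # Equivalent context-manager form:
    # with lockutils.lock(COMPUTE_RESOURCE_SEMAPHORE):
    #     ...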
[ 1651.457428] env[62525]: DEBUG oslo_concurrency.lockutils [None req-348f6f7a-d8dc-452b-8080-18d83a079ae0 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.383s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.538998] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781673, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071129} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.539149] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1651.540402] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c877234e-9d64-4b4c-9490-acc5c1f4fba2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.562298] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9/2ad723ff-6540-4bb4-b09e-52e6a9fb12b9.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1651.562910] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c57e7a9-5c0b-4b90-87fb-9a0cead84455 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.585335] env[62525]: DEBUG nova.compute.manager [req-20c78127-e346-4725-b7e1-46e6475b0e94 req-b89dcb77-e56f-423e-b0f8-03116b0e7e22 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received event network-vif-unplugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1651.585740] env[62525]: DEBUG oslo_concurrency.lockutils [req-20c78127-e346-4725-b7e1-46e6475b0e94 req-b89dcb77-e56f-423e-b0f8-03116b0e7e22 service nova] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.585800] env[62525]: DEBUG oslo_concurrency.lockutils [req-20c78127-e346-4725-b7e1-46e6475b0e94 req-b89dcb77-e56f-423e-b0f8-03116b0e7e22 service nova] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.585974] env[62525]: DEBUG oslo_concurrency.lockutils 
[req-20c78127-e346-4725-b7e1-46e6475b0e94 req-b89dcb77-e56f-423e-b0f8-03116b0e7e22 service nova] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.586300] env[62525]: DEBUG nova.compute.manager [req-20c78127-e346-4725-b7e1-46e6475b0e94 req-b89dcb77-e56f-423e-b0f8-03116b0e7e22 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] No waiting events found dispatching network-vif-unplugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1651.586491] env[62525]: WARNING nova.compute.manager [req-20c78127-e346-4725-b7e1-46e6475b0e94 req-b89dcb77-e56f-423e-b0f8-03116b0e7e22 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received unexpected event network-vif-unplugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 for instance with vm_state shelved and task_state shelving_offloading. [ 1651.588521] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1651.588521] env[62525]: value = "task-1781675" [ 1651.588521] env[62525]: _type = "Task" [ 1651.588521] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.598989] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781675, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.614809] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1651.615821] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b986a8b-474d-4365-ab98-b0895ed18861 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.623938] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1651.624245] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15daa424-88d3-40a1-91c8-da3ffc41457f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.740778] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1651.741033] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1651.741221] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleting the datastore file [datastore1] e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1651.741496] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c05db7d7-c73c-4b1e-893a-951863089557 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.748306] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1651.748306] env[62525]: value = "task-1781677" [ 1651.748306] env[62525]: _type = "Task" [ 1651.748306] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.756572] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781677, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.848307] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781674, 'name': Rename_Task, 'duration_secs': 0.138875} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.848584] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1651.848829] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a49acc75-dbf3-4165-9fb7-c6f81845f0ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.854845] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1651.854845] env[62525]: value = "task-1781678" [ 1651.854845] env[62525]: _type = "Task" [ 1651.854845] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.869970] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.955728] env[62525]: DEBUG oslo_concurrency.lockutils [None req-784aeebc-e7a9-494c-a382-31949f43b398 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.090s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.103115] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781675, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.114802] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe714d9-7c71-48ff-8875-2e3dce3f9f02 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.122827] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a338c3a6-7100-4a34-adc9-224a3db1797d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.156345] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1265fc45-6f17-4be9-ada5-6cbb7192555f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.168017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111af754-7fe5-4b1d-88ea-13cb18ae0165 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.180719] env[62525]: DEBUG nova.compute.provider_tree [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.182884] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.183042] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.183247] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.183432] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.183595] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 
tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "4e52e21e-4db3-45e5-b88d-455d1b8ea5c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.185583] env[62525]: INFO nova.compute.manager [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Terminating instance [ 1652.191018] env[62525]: DEBUG nova.compute.manager [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1652.191018] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1652.191018] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db695be-b655-42d8-bb1a-256f6ff04eaf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.198232] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1652.198605] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2de6e6d1-e65a-4b15-b7ef-fc1d1d910db9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.205984] env[62525]: DEBUG oslo_vmware.api [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1652.205984] env[62525]: value = "task-1781679" [ 1652.205984] env[62525]: _type = "Task" [ 1652.205984] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.214762] env[62525]: DEBUG oslo_vmware.api [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.258659] env[62525]: DEBUG oslo_vmware.api [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366492} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.259040] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1652.259288] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1652.259516] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1652.285641] env[62525]: INFO nova.scheduler.client.report [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleted allocations for instance e8586018-100e-4729-97fc-98effa87cd9e [ 1652.365335] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781678, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.525981] env[62525]: INFO nova.compute.manager [None req-7c597a70-22da-45a1-9bfd-1109769c819e tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Get console output [ 1652.526479] env[62525]: WARNING nova.virt.vmwareapi.driver [None req-7c597a70-22da-45a1-9bfd-1109769c819e tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] The console log is missing. Check your VSPC configuration [ 1652.602207] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781675, 'name': ReconfigVM_Task, 'duration_secs': 0.817983} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.602479] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9/2ad723ff-6540-4bb4-b09e-52e6a9fb12b9.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1652.603181] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-985c292a-8bd4-433b-bca2-21e04bd098ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.610260] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1652.610260] env[62525]: value = "task-1781680" [ 1652.610260] env[62525]: _type = "Task" [ 1652.610260] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.617346] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781680, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.684050] env[62525]: DEBUG nova.scheduler.client.report [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1652.716327] env[62525]: DEBUG oslo_vmware.api [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781679, 'name': PowerOffVM_Task, 'duration_secs': 0.250621} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.716607] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1652.716772] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1652.717019] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60db7b16-025b-44fa-a34e-293cf8e91eb3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.790357] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.866340] env[62525]: DEBUG oslo_vmware.api [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781678, 'name': PowerOnVM_Task, 'duration_secs': 0.735386} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.866622] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1652.866862] env[62525]: INFO nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Took 8.83 seconds to spawn the instance on the hypervisor. 
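The destroy sequence logged around here (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task) maps onto the standard vSphere calls. A rough pyVmomi sketch of the same sequence follows; pyVmomi is used only for brevity (the driver in this log goes through oslo.vmware's suds client), and 'si', 'vm', 'datastore_folder', and 'datacenter' are placeholders for an existing connection and managed objects, not values from this log.

    from pyVim.task import WaitForTask
    from pyVmomi import vim

    def destroy_vm(si, vm, datastore_folder, datacenter):
        """Power off (if running), unregister, then delete the VM's datastore files."""
        if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
            WaitForTask(vm.PowerOffVM_Task())            # PowerOffVM_Task in the log
        vm.UnregisterVM()                                # UnregisterVM (no task returned)
        WaitForTask(si.content.fileManager.DeleteDatastoreFile_Task(
            name=datastore_folder, datacenter=datacenter))   # DeleteDatastoreFile_Task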
[ 1652.867040] env[62525]: DEBUG nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1652.867938] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd74ac71-0743-49af-a1f5-54028fecd496 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.028668] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.028918] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.122445] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781680, 'name': Rename_Task, 'duration_secs': 0.380926} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.122445] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1653.122445] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a49bfe6a-9bab-4c63-8d9c-c53130073b13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.129934] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1653.129934] env[62525]: value = "task-1781682" [ 1653.129934] env[62525]: _type = "Task" [ 1653.129934] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.136653] env[62525]: DEBUG nova.compute.manager [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1653.136840] env[62525]: DEBUG nova.compute.manager [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing instance network info cache due to event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1653.137075] env[62525]: DEBUG oslo_concurrency.lockutils [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.137205] env[62525]: DEBUG oslo_concurrency.lockutils [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.137421] env[62525]: DEBUG nova.network.neutron [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1653.144023] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781682, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.188887] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.192379] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.402s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.194197] env[62525]: DEBUG nova.objects.instance [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lazy-loading 'resources' on Instance uuid e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1653.194622] env[62525]: DEBUG oslo_concurrency.lockutils [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "94560d78-071c-419d-ad10-f42a5b2271a8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.194835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.211337] env[62525]: INFO nova.scheduler.client.report [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Deleted allocations for instance 7a92bac8-9cee-41ed-81e3-08b48432fe7c [ 1653.390033] env[62525]: INFO nova.compute.manager [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Took 15.57 seconds to build instance. [ 1653.531325] env[62525]: DEBUG nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1653.610945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.611318] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.611556] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.611820] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.612010] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.614119] env[62525]: INFO nova.compute.manager [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Terminating instance [ 1653.616017] env[62525]: DEBUG nova.compute.manager [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1653.616136] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1653.616953] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328b79b4-59fc-4c2b-8f0f-d50be4e5133c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.624764] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1653.624989] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fae7953-e4f3-44e3-ac22-31cd4b06674a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.632240] env[62525]: DEBUG oslo_vmware.api [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1653.632240] env[62525]: value = "task-1781683" [ 1653.632240] env[62525]: _type = "Task" [ 1653.632240] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.649018] env[62525]: DEBUG oslo_vmware.api [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781683, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.649309] env[62525]: DEBUG oslo_vmware.api [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781682, 'name': PowerOnVM_Task, 'duration_secs': 0.4339} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.649566] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1653.649764] env[62525]: INFO nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Took 6.87 seconds to spawn the instance on the hypervisor. 
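The recurring "Waiting for the task: (returnval){ value = \"task-...\" } to complete" and "progress is N%" messages above come from oslo.vmware polling each vCenter task until it finishes. A minimal, hypothetical sketch of that loop is below; get_task_info is an assumed callable returning a TaskInfo-like object (state, progress, result, error), whereas the real driver fetches this via RetrievePropertiesEx inside a looping call.

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter task until it reaches 'success' or 'error'."""
        while True:
            info = get_task_info(task_ref)   # one property-collector round trip
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # corresponds to the "progress is N%" DEBUG lines while waiting
            print("Task %s: progress is %s%%" % (task_ref, info.progress or 0))
            time.sleep(poll_interval)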
[ 1653.649951] env[62525]: DEBUG nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1653.651129] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d6fcdb-daf5-4122-92ef-c16a3f4c0742 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.679839] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1653.679839] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1653.679839] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Deleting the datastore file [datastore1] 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1653.680050] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46f7fe1e-dd3c-4c10-8308-d7244bf32c3f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.688627] env[62525]: DEBUG oslo_vmware.api [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1653.688627] env[62525]: value = "task-1781684" [ 1653.688627] env[62525]: _type = "Task" [ 1653.688627] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.700535] env[62525]: DEBUG nova.objects.instance [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lazy-loading 'numa_topology' on Instance uuid e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1653.702736] env[62525]: DEBUG nova.compute.utils [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1653.704255] env[62525]: DEBUG oslo_vmware.api [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.718953] env[62525]: DEBUG oslo_concurrency.lockutils [None req-901b0280-8e1b-4127-9d95-338dbb7997da tempest-ServersWithSpecificFlavorTestJSON-1214620027 tempest-ServersWithSpecificFlavorTestJSON-1214620027-project-member] Lock "7a92bac8-9cee-41ed-81e3-08b48432fe7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.564s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.770260] env[62525]: DEBUG nova.compute.manager [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received event network-changed-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1653.770585] env[62525]: DEBUG nova.compute.manager [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Refreshing instance network info cache due to event network-changed-dc247c87-0d2d-47bf-9d66-5e81d9237fa6. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1653.770821] env[62525]: DEBUG oslo_concurrency.lockutils [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] Acquiring lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.771010] env[62525]: DEBUG oslo_concurrency.lockutils [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] Acquired lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.771442] env[62525]: DEBUG nova.network.neutron [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Refreshing network info cache for port dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1653.892558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-42cd9607-3152-4f4c-ba19-b7b02f6e27ae tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.077s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.975833] env[62525]: DEBUG nova.network.neutron [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updated VIF entry in instance network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1653.976224] env[62525]: DEBUG nova.network.neutron [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.056144] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.145476] env[62525]: DEBUG oslo_vmware.api [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781683, 'name': PowerOffVM_Task, 'duration_secs': 0.263129} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.145774] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1654.145944] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1654.146217] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0106cf5a-e359-42d6-9c27-c4444ab35aa0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.174224] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Acquiring lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.174291] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.174712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Acquiring lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.174712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.174846] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.176306] env[62525]: INFO nova.compute.manager [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b 
tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Took 11.77 seconds to build instance. [ 1654.181020] env[62525]: INFO nova.compute.manager [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Terminating instance [ 1654.181020] env[62525]: DEBUG nova.compute.manager [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1654.181020] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1654.181020] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f417d2a8-d345-4a14-b64f-f54cf5eab504 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.189711] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1654.189982] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca975f7e-5c26-41e7-ac07-84420468ec1a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.200501] env[62525]: DEBUG oslo_vmware.api [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235666} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.203529] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1654.203529] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1654.203529] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1654.203529] env[62525]: INFO nova.compute.manager [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Took 2.01 seconds to destroy the instance on the hypervisor. [ 1654.203529] env[62525]: DEBUG oslo.service.loopingcall [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1654.203529] env[62525]: DEBUG oslo_vmware.api [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Waiting for the task: (returnval){ [ 1654.203529] env[62525]: value = "task-1781686" [ 1654.203529] env[62525]: _type = "Task" [ 1654.203529] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.203529] env[62525]: DEBUG nova.compute.manager [-] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1654.203529] env[62525]: DEBUG nova.network.neutron [-] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1654.208091] env[62525]: DEBUG oslo_concurrency.lockutils [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.208689] env[62525]: DEBUG nova.objects.base [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1654.275065] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1654.275362] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1654.275503] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Deleting the datastore file [datastore1] 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1654.277819] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22a30d87-81b6-4344-9e50-4a15389896f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.287474] env[62525]: DEBUG oslo_vmware.api [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1654.287474] env[62525]: value = "task-1781687" [ 1654.287474] env[62525]: _type = "Task" [ 1654.287474] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.297519] env[62525]: DEBUG oslo_vmware.api [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781687, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.481390] env[62525]: DEBUG oslo_concurrency.lockutils [req-c6aa8212-d015-49db-8ae7-3d5c53f16270 req-ef18b613-5ca2-4296-bb69-2a0b93e150f6 service nova] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.494172] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c9f8c8-4dfc-4a2d-aaaa-5255e5b5a259 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.504022] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d525012-2835-414c-bd9a-667d6f07da2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.534956] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0eef65-4010-4249-8b54-a40f16fc55e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.538361] env[62525]: DEBUG nova.network.neutron [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updated VIF entry in instance network info cache for port dc247c87-0d2d-47bf-9d66-5e81d9237fa6. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1654.538704] env[62525]: DEBUG nova.network.neutron [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [{"id": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "address": "fa:16:3e:94:5d:a4", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": null, "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapdc247c87-0d", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.546824] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08845f60-2d8c-4f0d-9e42-962a3204e702 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.559819] env[62525]: DEBUG nova.compute.provider_tree [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.678226] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.678654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-567d8abb-fdd3-4d0e-b935-c98e3db0131b tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.281s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.716631] env[62525]: DEBUG oslo_vmware.api [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Task: {'id': task-1781686, 'name': PowerOffVM_Task, 'duration_secs': 0.240085} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.716924] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1654.718818] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1654.718818] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-822f84a5-13cd-4b22-84a2-5a199c740ce3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.788435] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1654.788648] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1654.788826] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Deleting the datastore file [datastore1] 3ef2dbbe-0cf3-4098-91d8-e206a872bd08 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1654.793749] env[62525]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f822c689-4317-4dd0-a0a9-89b6bad3b55d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.805307] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.805560] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.805734] env[62525]: DEBUG nova.compute.manager [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1654.807176] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a268a78c-6958-4faa-bc85-acb3f0661591 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.814304] env[62525]: DEBUG oslo_vmware.api [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781687, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230214} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.814638] env[62525]: DEBUG oslo_vmware.api [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Waiting for the task: (returnval){ [ 1654.814638] env[62525]: value = "task-1781689" [ 1654.814638] env[62525]: _type = "Task" [ 1654.814638] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.815248] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1654.815479] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1654.815711] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1654.815887] env[62525]: INFO nova.compute.manager [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1654.816192] env[62525]: DEBUG oslo.service.loopingcall [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1654.816486] env[62525]: DEBUG nova.compute.manager [-] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1654.816602] env[62525]: DEBUG nova.network.neutron [-] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1654.827338] env[62525]: DEBUG nova.compute.manager [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1654.827714] env[62525]: DEBUG nova.objects.instance [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'flavor' on Instance uuid 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1654.835673] env[62525]: DEBUG oslo_vmware.api [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Task: {'id': task-1781689, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.041986] env[62525]: DEBUG oslo_concurrency.lockutils [req-e52af102-12c0-49ad-b618-ff30690c6bcc req-1b05d1ef-cab0-4cf7-b72c-4487818638ef service nova] Releasing lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1655.063494] env[62525]: DEBUG nova.scheduler.client.report [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1655.294065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "94560d78-071c-419d-ad10-f42a5b2271a8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.294065] env[62525]: DEBUG oslo_concurrency.lockutils [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.295217] env[62525]: INFO nova.compute.manager [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Attaching volume d63609e3-47b0-4b59-82a3-3f36cd21f331 to /dev/sdb [ 1655.326374] env[62525]: DEBUG oslo_vmware.api [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Task: {'id': task-1781689, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155344} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.326725] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1655.326805] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1655.327057] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1655.328137] env[62525]: INFO nova.compute.manager [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1655.328137] env[62525]: DEBUG oslo.service.loopingcall [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1655.328279] env[62525]: DEBUG nova.compute.manager [-] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1655.328388] env[62525]: DEBUG nova.network.neutron [-] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1655.332535] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1655.332796] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7ad2ebc-65a2-49dd-a3ef-67738586e193 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.339527] env[62525]: DEBUG oslo_vmware.api [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1655.339527] env[62525]: value = "task-1781690" [ 1655.339527] env[62525]: _type = "Task" [ 1655.339527] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.341676] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1f767b-d776-4b79-9ae4-1b5358eb215f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.351026] env[62525]: DEBUG nova.compute.manager [req-c6fadc6f-8288-41ee-b6a3-0b000eedc123 req-637461dd-cd82-4b72-81fc-ba36b67f5de0 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Received event network-vif-deleted-151d8aa1-065a-409f-9d41-61d553ade236 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1655.351219] env[62525]: INFO nova.compute.manager [req-c6fadc6f-8288-41ee-b6a3-0b000eedc123 req-637461dd-cd82-4b72-81fc-ba36b67f5de0 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Neutron deleted interface 151d8aa1-065a-409f-9d41-61d553ade236; detaching it from the instance and deleting it from the info cache [ 1655.351385] env[62525]: DEBUG nova.network.neutron [req-c6fadc6f-8288-41ee-b6a3-0b000eedc123 req-637461dd-cd82-4b72-81fc-ba36b67f5de0 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1655.362992] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c45415f-b958-428f-ba4f-f89256575f96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.365576] env[62525]: DEBUG oslo_vmware.api [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781690, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.381308] env[62525]: DEBUG nova.virt.block_device [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updating existing volume attachment record: 5a32036f-a6ec-451c-a409-8c0ea3747de7 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1655.509854] env[62525]: DEBUG nova.network.neutron [-] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.317081] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.122s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.317081] env[62525]: DEBUG nova.network.neutron [-] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.317587] env[62525]: INFO nova.compute.manager [-] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Took 2.11 seconds to deallocate network for instance. 
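[editor's note] The "Acquiring lock ... / acquired ... waited Ns / ... released ... held Ns" triplets throughout this section are produced by oslo.concurrency's named-lock helpers guarding sections such as "compute_resources" and the per-instance UUID locks. A minimal sketch of that usage pattern follows; the lock name mirrors the log, but the guarded function is a hypothetical placeholder, not the actual Nova code path.

    # Sketch: named-lock usage of the kind that emits the Acquiring/acquired/
    # released messages above. claim_for_instance is a stand-in for the real
    # guarded work (e.g. ResourceTracker.instance_claim).
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def claim_for_instance(instance_uuid):
        # Only one caller in this process runs this body at a time; waiters
        # block, and the waited/held durations are what the log reports.
        return {"instance": instance_uuid, "claimed": True}

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock("compute_resources"):
        pass  # critical section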
[ 1656.319264] env[62525]: DEBUG nova.compute.manager [req-06275362-ca38-4160-89a3-6730f6c7f349 req-d03f87b9-4ccc-40a4-878e-bdde27c74d97 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-vif-deleted-befd9821-2180-4d16-b3eb-fb5cd2996595 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1656.319264] env[62525]: DEBUG nova.compute.manager [req-06275362-ca38-4160-89a3-6730f6c7f349 req-d03f87b9-4ccc-40a4-878e-bdde27c74d97 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-vif-deleted-aa34451f-78cd-485b-9a6a-4ff6e664707f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1656.319449] env[62525]: DEBUG nova.compute.manager [req-06275362-ca38-4160-89a3-6730f6c7f349 req-d03f87b9-4ccc-40a4-878e-bdde27c74d97 service nova] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Received event network-vif-deleted-43bbe938-60f4-4e29-9eb3-2eeecc0f7dba {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1656.320551] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.264s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.322494] env[62525]: INFO nova.compute.claims [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1656.332022] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12b4dc73-d31f-40e3-9736-2de9df5c8f26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.341900] env[62525]: DEBUG oslo_vmware.api [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781690, 'name': PowerOffVM_Task, 'duration_secs': 0.195032} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.342341] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1656.342466] env[62525]: DEBUG nova.compute.manager [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1656.346035] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a420d7d-5920-41e2-be5b-461417e4ccf5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.352026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e1c461-dff7-46fb-9dce-2ea7f12d3910 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.383131] env[62525]: DEBUG nova.compute.manager [req-c6fadc6f-8288-41ee-b6a3-0b000eedc123 req-637461dd-cd82-4b72-81fc-ba36b67f5de0 service nova] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Detach interface failed, port_id=151d8aa1-065a-409f-9d41-61d553ade236, reason: Instance 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1656.820964] env[62525]: DEBUG nova.network.neutron [-] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.823221] env[62525]: INFO nova.compute.manager [-] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Took 2.01 seconds to deallocate network for instance. 
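[editor's note] The PowerOffVM_Task / DeleteDatastoreFile_Task entries above follow the usual oslo.vmware pattern: invoke a task-returning vSphere method through the session, then poll it with wait_for_task until the "Task: {...} completed successfully" state is reached. A rough sketch under the assumption of an already-configured VMwareAPISession; the connection parameters and vm_ref lookup are placeholders, not values from this deployment.

    # Sketch: invoking a vSphere task and polling it, mirroring the
    # PowerOffVM_Task / _poll_task entries in the log.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        host="vcenter.example.org", server_username="user",
        server_password="secret", api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # invoke_api returns a Task managed object; wait_for_task polls it
        # until success, raising if the task ends in an error state.
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(task)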
[ 1656.830722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dc59a279-00af-437f-b5bc-96d9a5574283 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.045s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.831437] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.153s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.831621] env[62525]: INFO nova.compute.manager [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Unshelving [ 1656.841703] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.868857] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f544a1a-92f1-4866-8921-0342641f115d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.063s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.325615] env[62525]: INFO nova.compute.manager [-] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Took 2.00 seconds to deallocate network for instance. 
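[editor's note] Each termination in this section runs the same high-level order: power off the VM, unregister it from vCenter, delete its directory from the datastore, then deallocate its Neutron ports (the "Took N seconds to deallocate network" lines). A condensed, self-contained illustration of that ordering; every helper below is a hypothetical stand-in for the corresponding driver/manager step, not the actual vmops code.

    # Sketch of the teardown order visible in the log; the helpers only
    # record which step would run.
    def power_off_vm(vm):          print(f"PowerOffVM_Task for {vm}")
    def unregister_vm(vm):         print(f"UnregisterVM for {vm}")
    def delete_datastore_dir(p):   print(f"DeleteDatastoreFile_Task for {p}")
    def deallocate_ports(uuid):    print(f"deallocate_for_instance() for {uuid}")

    def destroy_instance(vm, datastore_path, instance_uuid):
        power_off_vm(vm)                      # 1. guest powered off
        unregister_vm(vm)                     # 2. VM removed from vCenter inventory
        delete_datastore_dir(datastore_path)  # 3. contents deleted from the datastore
        deallocate_ports(instance_uuid)       # 4. Neutron ports released

    destroy_instance("vm-123",
                     "[datastore1] 3ef2dbbe-0cf3-4098-91d8-e206a872bd08",
                     "3ef2dbbe-0cf3-4098-91d8-e206a872bd08")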
[ 1657.332266] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.377307] env[62525]: DEBUG nova.compute.manager [req-2f511af2-e73a-4b9a-8441-9c2056f0f143 req-99663acb-9a7d-46fd-b69a-eba6306381a7 service nova] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Received event network-vif-deleted-3cc455e7-72d7-4e09-ba23-155358d0b956 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1657.443382] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.443619] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.443823] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.444008] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.444180] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.446325] env[62525]: INFO nova.compute.manager [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Terminating instance [ 1657.450707] env[62525]: DEBUG nova.compute.manager [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 
2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1657.450907] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1657.451764] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854dbb58-24d6-4094-bf1e-1705986e8e72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.461887] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1657.462132] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af4bf0d6-4b72-499c-9a8a-65895c76ae0b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.541351] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1657.541597] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1657.541785] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleting the datastore file [datastore1] 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1657.542073] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0487be9b-fcba-4328-929d-26d517c091ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.548950] env[62525]: DEBUG oslo_vmware.api [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1657.548950] env[62525]: value = "task-1781695" [ 1657.548950] env[62525]: _type = "Task" [ 1657.548950] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.556987] env[62525]: DEBUG oslo_vmware.api [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.569786] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90915ac-4404-4111-a431-0017f85e6d4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.577700] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f885ec2f-57ff-48d3-9d30-264232019ab7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.607500] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6286905-7eba-4b5c-9a51-1e77637baf43 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.615361] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44274b8f-d870-4727-8e14-d132a8237246 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.629758] env[62525]: DEBUG nova.compute.provider_tree [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1657.812354] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.812659] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.833642] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.856866] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.059036] env[62525]: DEBUG oslo_vmware.api [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': 
task-1781695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140918} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.059294] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1658.059501] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1658.059684] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1658.059889] env[62525]: INFO nova.compute.manager [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1658.060147] env[62525]: DEBUG oslo.service.loopingcall [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.060338] env[62525]: DEBUG nova.compute.manager [-] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1658.060428] env[62525]: DEBUG nova.network.neutron [-] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1658.133255] env[62525]: DEBUG nova.scheduler.client.report [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1658.315701] env[62525]: DEBUG nova.compute.utils [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1658.638859] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.639419] env[62525]: DEBUG nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1658.642474] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.801s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.642723] env[62525]: DEBUG nova.objects.instance [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lazy-loading 'resources' on Instance uuid 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1658.818618] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.962779] env[62525]: DEBUG nova.network.neutron [-] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.149855] env[62525]: DEBUG nova.compute.utils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1659.151573] env[62525]: DEBUG nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1659.151796] env[62525]: DEBUG nova.network.neutron [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1659.209128] env[62525]: DEBUG nova.policy [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1659.408670] env[62525]: DEBUG nova.compute.manager [req-19dbc0c6-19a4-4e4b-b3cf-39280d7af16c req-610860c1-c31a-429d-81ef-9030558d0e61 service nova] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Received event network-vif-deleted-85a6973b-d6f5-4c81-b449-8ab01aa9a0f3 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1659.413759] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf4da40-970d-4783-b91c-029af95224d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.423140] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7ad27e-c3aa-438c-8c16-1f683c45d34b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.456061] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1085d22d-bf4c-4d4a-b060-9f46f9257a6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.463885] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9a1653-1006-4cf5-b617-907b3ae2da45 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.468169] env[62525]: INFO nova.compute.manager [-] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Took 1.41 seconds to deallocate network for instance. 
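[editor's note] The inventory payloads logged above (and the 409 placement.concurrent_update retry that follows) use standard Placement resource-class records, where usable capacity per class is (total - reserved) * allocation_ratio, consumed in min_unit/max_unit/step_size-sized chunks. A small, self-contained illustration using the VCPU/MEMORY_MB/DISK_GB figures from this log:

    # Sketch: effective capacity from the inventory records shown above.
    # Formula: capacity = (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g}")
    # Prints: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400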
[ 1659.482098] env[62525]: DEBUG nova.compute.provider_tree [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1659.510538] env[62525]: DEBUG nova.network.neutron [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Successfully created port: 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1659.654353] env[62525]: DEBUG nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1659.896774] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.897215] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.897546] env[62525]: INFO nova.compute.manager [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Attaching volume f910a09f-577c-4fc0-bd96-40dafac718ab to /dev/sdb [ 1659.932905] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779778d3-338c-4305-a5cb-07a49816d57a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.941233] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cbae56-bd57-4bc8-a8f8-bae9a25634f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.955375] env[62525]: DEBUG nova.virt.block_device [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 
0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating existing volume attachment record: 79a6b43c-bcc0-4ae3-8ade-e5314155156d {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1659.984312] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.002669] env[62525]: ERROR nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [req-c4229cf2-4616-44fc-9d78-b7a6dfe16dd4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c4229cf2-4616-44fc-9d78-b7a6dfe16dd4"}]} [ 1660.020242] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1660.034719] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1660.035089] env[62525]: DEBUG nova.compute.provider_tree [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1660.047738] env[62525]: DEBUG nova.scheduler.client.report [None 
req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1660.065275] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1660.286423] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bce03a0-8aab-4c74-8197-418438a12ce6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.295347] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fe8b8d-6bd5-4ded-a6e3-b957782f4c98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.328482] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795fe48d-f400-4330-a07e-c0d5ebf64770 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.336527] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e514be-9934-4bc0-bada-73c7946f162b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.350410] env[62525]: DEBUG nova.compute.provider_tree [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1660.434203] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1660.434513] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369790', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'name': 'volume-d63609e3-47b0-4b59-82a3-3f36cd21f331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94560d78-071c-419d-ad10-f42a5b2271a8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'serial': 'd63609e3-47b0-4b59-82a3-3f36cd21f331'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1660.435482] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9b089c-84ae-42f9-acee-2e7331641848 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.452401] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453e761a-bb12-41a4-82cd-eb125b2c4644 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.478450] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] volume-d63609e3-47b0-4b59-82a3-3f36cd21f331/volume-d63609e3-47b0-4b59-82a3-3f36cd21f331.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1660.478782] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33cd78bb-7560-495d-bd7a-aebc0c2c2a8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.497918] env[62525]: DEBUG oslo_vmware.api [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1660.497918] env[62525]: value = "task-1781700" [ 1660.497918] env[62525]: _type = "Task" [ 1660.497918] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.506863] env[62525]: DEBUG oslo_vmware.api [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781700, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.663522] env[62525]: DEBUG nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1660.687613] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1660.687893] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1660.688115] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1660.688291] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1660.688428] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1660.688582] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1660.688806] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1660.688984] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1660.689230] 
env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1660.689420] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1660.689625] env[62525]: DEBUG nova.virt.hardware [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1660.690552] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456ea21c-db1b-42fe-93ee-58735535d25a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.701232] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca93ad56-f959-4dfe-bb18-2c69efe475ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.779630] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.779860] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.871843] env[62525]: ERROR nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [req-b000e08d-e0fb-4be9-bae3-0dba8020e843] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b000e08d-e0fb-4be9-bae3-0dba8020e843"}]} [ 1660.888831] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1660.903422] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1660.903693] env[62525]: DEBUG nova.compute.provider_tree [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1660.916295] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1660.934100] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1661.008623] env[62525]: DEBUG oslo_vmware.api [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781700, 'name': ReconfigVM_Task, 'duration_secs': 0.377812} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.010931] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Reconfigured VM instance instance-00000036 to attach disk [datastore1] volume-d63609e3-47b0-4b59-82a3-3f36cd21f331/volume-d63609e3-47b0-4b59-82a3-3f36cd21f331.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1661.015971] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e014e7e1-4a44-4558-9736-b06ded0e7b00 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.026830] env[62525]: DEBUG nova.network.neutron [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Successfully updated port: 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1661.036867] env[62525]: DEBUG oslo_vmware.api [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1661.036867] env[62525]: value = "task-1781701" [ 1661.036867] env[62525]: _type = "Task" [ 1661.036867] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.046953] env[62525]: DEBUG oslo_vmware.api [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781701, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.160121] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff70f46d-119f-474c-88f2-c471835754f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.168075] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a446d358-b9c2-4209-b556-a2f8a1b7ab2a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.199359] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e1da16-3485-44f0-8821-897b1fc3a327 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.206928] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0a8028-01f2-4649-963a-1b3cf5b94aa2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.220283] env[62525]: DEBUG nova.compute.provider_tree [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1661.283296] env[62525]: DEBUG nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1661.439232] env[62525]: DEBUG nova.compute.manager [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-vif-plugged-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1661.439460] env[62525]: DEBUG oslo_concurrency.lockutils [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.439667] env[62525]: DEBUG oslo_concurrency.lockutils [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.439894] env[62525]: DEBUG oslo_concurrency.lockutils [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.440110] env[62525]: DEBUG nova.compute.manager [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] No waiting events found dispatching network-vif-plugged-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1661.440286] env[62525]: WARNING nova.compute.manager [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received unexpected event network-vif-plugged-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 for instance with vm_state building and task_state spawning. [ 1661.440445] env[62525]: DEBUG nova.compute.manager [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1661.440595] env[62525]: DEBUG nova.compute.manager [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing instance network info cache due to event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1661.440773] env[62525]: DEBUG oslo_concurrency.lockutils [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.440906] env[62525]: DEBUG oslo_concurrency.lockutils [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.441070] env[62525]: DEBUG nova.network.neutron [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing network info cache for port 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1661.530858] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.545725] env[62525]: DEBUG oslo_vmware.api [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781701, 'name': ReconfigVM_Task, 'duration_secs': 0.167392} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.546041] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369790', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'name': 'volume-d63609e3-47b0-4b59-82a3-3f36cd21f331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94560d78-071c-419d-ad10-f42a5b2271a8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'serial': 'd63609e3-47b0-4b59-82a3-3f36cd21f331'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1661.724098] env[62525]: DEBUG nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1661.801019] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.978599] env[62525]: DEBUG nova.network.neutron [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1662.057337] env[62525]: DEBUG nova.network.neutron [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.228909] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.586s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.231262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.899s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.231518] env[62525]: DEBUG nova.objects.instance [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lazy-loading 'resources' on Instance uuid 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1662.251776] env[62525]: INFO nova.scheduler.client.report [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Deleted allocations for instance 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8 [ 1662.559598] env[62525]: DEBUG oslo_concurrency.lockutils [req-a96766b9-e038-4627-8632-30daa18b72af req-2e212724-7f52-4a86-a412-4d16113fdbfb service nova] Releasing lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.559996] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.560179] env[62525]: DEBUG nova.network.neutron [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1662.578859] env[62525]: DEBUG nova.objects.instance [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lazy-loading 'flavor' on Instance uuid 94560d78-071c-419d-ad10-f42a5b2271a8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1662.761465] env[62525]: DEBUG oslo_concurrency.lockutils [None req-196e23b9-c67b-4134-9162-dae53b630d76 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock 
"4e52e21e-4db3-45e5-b88d-455d1b8ea5c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.578s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.979298] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31819492-9b00-4987-a5bd-e4c496df9e96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.986786] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85110a2e-203a-4935-ab10-b5f05ca69540 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.017916] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2f1bbc-0f38-410b-8426-40faeb7031ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.025422] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b74af5-aef6-48c4-bd52-3e1d057912a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.038841] env[62525]: DEBUG nova.compute.provider_tree [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1663.087329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-79fb0000-4d4f-4d1d-82cf-1b417955593c tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.793s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.093299] env[62525]: DEBUG nova.network.neutron [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1663.237774] env[62525]: DEBUG nova.network.neutron [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.379044] env[62525]: DEBUG oslo_concurrency.lockutils [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "94560d78-071c-419d-ad10-f42a5b2271a8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.379044] env[62525]: DEBUG oslo_concurrency.lockutils [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.541903] env[62525]: DEBUG nova.scheduler.client.report [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1663.742202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock 
"refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.742202] env[62525]: DEBUG nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Instance network_info: |[{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1663.742202] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:3a:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '669e4919-e0ad-4e23-9f23-4c5f2be0d858', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1663.748533] env[62525]: DEBUG oslo.service.loopingcall [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.748763] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1663.748979] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b01db8fc-d2c1-4029-9f08-623acdcaf9bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.768454] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1663.768454] env[62525]: value = "task-1781703" [ 1663.768454] env[62525]: _type = "Task" [ 1663.768454] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.776079] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781703, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.886960] env[62525]: INFO nova.compute.manager [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Detaching volume d63609e3-47b0-4b59-82a3-3f36cd21f331 [ 1663.921775] env[62525]: INFO nova.virt.block_device [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Attempting to driver detach volume d63609e3-47b0-4b59-82a3-3f36cd21f331 from mountpoint /dev/sdb [ 1663.921775] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1663.921947] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369790', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'name': 'volume-d63609e3-47b0-4b59-82a3-3f36cd21f331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94560d78-071c-419d-ad10-f42a5b2271a8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'serial': 'd63609e3-47b0-4b59-82a3-3f36cd21f331'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1663.922953] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c212ae5-a03f-41ee-8806-8dc533a93f8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.945935] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb0b9fc-6a9d-42ce-bab4-90c1859a7807 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.953353] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8134e4cf-9658-48d0-9315-70836264f963 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.974150] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8bf5fd-5553-491a-ac07-f2657b4ff7ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.990112] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] The volume has not been 
displaced from its original location: [datastore1] volume-d63609e3-47b0-4b59-82a3-3f36cd21f331/volume-d63609e3-47b0-4b59-82a3-3f36cd21f331.vmdk. No consolidation needed. {{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1663.995343] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Reconfiguring VM instance instance-00000036 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1663.995637] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-098fe1cd-9872-4d15-bc19-5570872a512b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.014083] env[62525]: DEBUG oslo_vmware.api [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1664.014083] env[62525]: value = "task-1781704" [ 1664.014083] env[62525]: _type = "Task" [ 1664.014083] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.023709] env[62525]: DEBUG oslo_vmware.api [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781704, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.048743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.051212] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.218s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.051588] env[62525]: DEBUG nova.objects.instance [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Lazy-loading 'resources' on Instance uuid 3ef2dbbe-0cf3-4098-91d8-e206a872bd08 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1664.070836] env[62525]: INFO nova.scheduler.client.report [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Deleted allocations for instance 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d [ 1664.279914] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781703, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.501604] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Volume attach. Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1664.501900] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369792', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'name': 'volume-f910a09f-577c-4fc0-bd96-40dafac718ab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0067de08-6708-4c7c-a83a-ed9df193d5cd', 'attached_at': '', 'detached_at': '', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'serial': 'f910a09f-577c-4fc0-bd96-40dafac718ab'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1664.502831] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da3915f-6227-4be8-9008-ad110b9e6dcc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.522452] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9e2700-2ad2-4c21-a727-6587a33a419e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.530235] env[62525]: DEBUG oslo_vmware.api [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781704, 'name': ReconfigVM_Task, 'duration_secs': 0.290461} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.542359] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Reconfigured VM instance instance-00000036 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1664.554429] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] volume-f910a09f-577c-4fc0-bd96-40dafac718ab/volume-f910a09f-577c-4fc0-bd96-40dafac718ab.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1664.554687] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2d82d70-bab2-443b-bc6d-cc3a0d2979d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.564378] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e180fbf0-8aac-4e8e-bb8e-a9cc154e22aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.581860] env[62525]: DEBUG oslo_concurrency.lockutils [None req-300baf23-b11c-4dbc-8760-13b06979d3ab tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "29aaac3b-1f0e-40fe-9805-a0e6e6ae597d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.971s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.586821] env[62525]: DEBUG oslo_vmware.api [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1664.586821] env[62525]: value = "task-1781706" [ 1664.586821] env[62525]: _type = "Task" [ 1664.586821] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.588218] env[62525]: DEBUG oslo_vmware.api [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1664.588218] env[62525]: value = "task-1781705" [ 1664.588218] env[62525]: _type = "Task" [ 1664.588218] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.603132] env[62525]: DEBUG oslo_vmware.api [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781706, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.606584] env[62525]: DEBUG oslo_vmware.api [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781705, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.763010] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7678f9d0-b179-4b2b-bb6a-1cea6c5760a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.773832] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20dde18-60a0-46d9-ab88-35f6b76e2f08 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.782177] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781703, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.807768] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4874aed4-450d-44b7-81db-1f59874875ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.815144] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3138e0-17f1-401e-8c27-3cfe3df4baab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.829212] env[62525]: DEBUG nova.compute.provider_tree [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1665.102588] env[62525]: DEBUG oslo_vmware.api [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781706, 'name': ReconfigVM_Task, 'duration_secs': 0.363964} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.106136] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfigured VM instance instance-00000037 to attach disk [datastore1] volume-f910a09f-577c-4fc0-bd96-40dafac718ab/volume-f910a09f-577c-4fc0-bd96-40dafac718ab.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1665.112156] env[62525]: DEBUG oslo_vmware.api [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781705, 'name': ReconfigVM_Task, 'duration_secs': 0.140481} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.112411] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0b31ea7-6770-45ea-9294-ecb63525fb0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.123044] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369790', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'name': 'volume-d63609e3-47b0-4b59-82a3-3f36cd21f331', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94560d78-071c-419d-ad10-f42a5b2271a8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd63609e3-47b0-4b59-82a3-3f36cd21f331', 'serial': 'd63609e3-47b0-4b59-82a3-3f36cd21f331'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1665.132280] env[62525]: DEBUG oslo_vmware.api [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1665.132280] env[62525]: value = "task-1781707" [ 1665.132280] env[62525]: _type = "Task" [ 1665.132280] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.140848] env[62525]: DEBUG oslo_vmware.api [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781707, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.234936] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.235445] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.235646] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1665.235816] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1665.273573] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.273999] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.291478] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781703, 'name': CreateVM_Task, 'duration_secs': 1.251253} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.292200] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1665.292948] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.293214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.293756] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1665.294396] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a02cc9d3-b4fc-47d4-a750-221821b0834c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.302114] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1665.302114] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d90e56-2bc1-f96c-99bf-638982bfdbd6" [ 1665.302114] env[62525]: _type = "Task" [ 1665.302114] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.311725] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d90e56-2bc1-f96c-99bf-638982bfdbd6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.332584] env[62525]: DEBUG nova.scheduler.client.report [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1665.642742] env[62525]: DEBUG oslo_vmware.api [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781707, 'name': ReconfigVM_Task, 'duration_secs': 0.16429} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.643151] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369792', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'name': 'volume-f910a09f-577c-4fc0-bd96-40dafac718ab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0067de08-6708-4c7c-a83a-ed9df193d5cd', 'attached_at': '', 'detached_at': '', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'serial': 'f910a09f-577c-4fc0-bd96-40dafac718ab'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1665.680317] env[62525]: DEBUG nova.objects.instance [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lazy-loading 'flavor' on Instance uuid 94560d78-071c-419d-ad10-f42a5b2271a8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.743148] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Skipping network cache update for instance because it is Building. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1665.783603] env[62525]: DEBUG nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1665.787134] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.787270] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.787416] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1665.787561] env[62525]: DEBUG nova.objects.instance [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lazy-loading 'info_cache' on Instance uuid fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.814100] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d90e56-2bc1-f96c-99bf-638982bfdbd6, 'name': SearchDatastore_Task, 'duration_secs': 0.012549} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.814426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.814677] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1665.814982] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.815173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.815359] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 
tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1665.815668] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe3febb8-3a91-4557-9bc4-5903b2b2b70d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.832548] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1665.832548] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1665.833180] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e33454a4-d10a-4ec2-8189-bbdac5b75da9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.837757] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.841897] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.984s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.842242] env[62525]: DEBUG nova.objects.instance [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lazy-loading 'pci_requests' on Instance uuid e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.844728] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1665.844728] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52eebeaa-51b6-b43f-bd6a-061bc2b12f6d" [ 1665.844728] env[62525]: _type = "Task" [ 1665.844728] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.861328] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52eebeaa-51b6-b43f-bd6a-061bc2b12f6d, 'name': SearchDatastore_Task, 'duration_secs': 0.010093} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.862574] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-514271a9-3ceb-4d8c-96f0-8d767a561be1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.869297] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1665.869297] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52417c3a-d43a-0346-0976-b71fd419a16d" [ 1665.869297] env[62525]: _type = "Task" [ 1665.869297] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.880711] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52417c3a-d43a-0346-0976-b71fd419a16d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.892548] env[62525]: INFO nova.scheduler.client.report [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Deleted allocations for instance 3ef2dbbe-0cf3-4098-91d8-e206a872bd08 [ 1666.311730] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.351091] env[62525]: DEBUG nova.objects.instance [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lazy-loading 'numa_topology' on Instance uuid e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1666.380519] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52417c3a-d43a-0346-0976-b71fd419a16d, 'name': SearchDatastore_Task, 'duration_secs': 0.023325} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.380825] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.381099] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 80fbfbda-07fb-43ab-be74-3cbdaf890a55/80fbfbda-07fb-43ab-be74-3cbdaf890a55.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1666.381353] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4b87a27-f748-45ae-8a52-f9638696af99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.388357] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1666.388357] env[62525]: value = "task-1781708" [ 1666.388357] env[62525]: _type = "Task" [ 1666.388357] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.396089] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781708, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.399508] env[62525]: DEBUG oslo_concurrency.lockutils [None req-558e0d51-862b-44b3-b10c-91cce023ac91 tempest-DeleteServersAdminTestJSON-2068679982 tempest-DeleteServersAdminTestJSON-2068679982-project-admin] Lock "3ef2dbbe-0cf3-4098-91d8-e206a872bd08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.225s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.696793] env[62525]: DEBUG oslo_concurrency.lockutils [None req-090f2b0b-745d-460e-96cb-a68a2ec2ccbb tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.318s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.699645] env[62525]: DEBUG nova.objects.instance [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lazy-loading 'flavor' on Instance uuid 0067de08-6708-4c7c-a83a-ed9df193d5cd {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1666.856386] env[62525]: INFO nova.compute.claims [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1666.900244] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781708, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483675} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.900794] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 80fbfbda-07fb-43ab-be74-3cbdaf890a55/80fbfbda-07fb-43ab-be74-3cbdaf890a55.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1666.901224] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1666.901930] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b238ed7-2308-41cc-98cb-33463ec978db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.908736] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1666.908736] env[62525]: value = "task-1781709" [ 1666.908736] env[62525]: _type = "Task" [ 1666.908736] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.917194] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781709, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.034014] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "73156235-1b13-4fda-8957-ed8cd88ceb43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.034442] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.205152] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0ee0f9b6-c21d-4ca9-8deb-776eafb480cf tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.308s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.277161] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.277406] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.364281] env[62525]: INFO nova.compute.manager [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Rescuing [ 1667.364476] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.364622] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.364828] env[62525]: DEBUG 
nova.network.neutron [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.418594] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781709, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066003} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.418867] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1667.419730] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebab046b-321d-443b-bc86-444001417f44 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.442258] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 80fbfbda-07fb-43ab-be74-3cbdaf890a55/80fbfbda-07fb-43ab-be74-3cbdaf890a55.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1667.444996] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-445ddc84-deaa-4156-b8d0-dc9a1623a1aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.464431] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1667.464431] env[62525]: value = "task-1781710" [ 1667.464431] env[62525]: _type = "Task" [ 1667.464431] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.473825] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781710, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.537369] env[62525]: DEBUG nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1667.560430] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updating instance_info_cache with network_info: [{"id": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "address": "fa:16:3e:5e:6d:f5", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b0c2886-9f", "ovs_interfaceid": "6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.780030] env[62525]: DEBUG nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1667.976346] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781710, 'name': ReconfigVM_Task, 'duration_secs': 0.311904} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.980724] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 80fbfbda-07fb-43ab-be74-3cbdaf890a55/80fbfbda-07fb-43ab-be74-3cbdaf890a55.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1667.981596] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91355303-7824-4786-87b0-4b884bcff955 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.988484] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1667.988484] env[62525]: value = "task-1781711" [ 1667.988484] env[62525]: _type = "Task" [ 1667.988484] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.999206] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781711, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.058030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.063185] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.063379] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1668.063569] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.063716] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.063856] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.063997] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.067575] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.067575] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.067575] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1668.067575] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.093828] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "c75091c3-45d2-4c71-b2ad-d38e8a449624" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.094126] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.112026] env[62525]: DEBUG nova.network.neutron [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.147837] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9394b9e2-d59e-4027-91c2-d3bda441dc0b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.155964] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f2370d-dc0a-4508-939c-5d836dcdf641 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.186445] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4e6d01-0399-4e80-b038-ed988ee7b544 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.194986] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81eb1fa1-2f04-4c31-9c4a-4c2baef6f2d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.211730] env[62525]: DEBUG nova.compute.provider_tree [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.296848] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.497932] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781711, 'name': Rename_Task, 'duration_secs': 0.149146} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.499029] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1668.499029] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-412b7976-ae0d-4c89-9b2f-eb1166e65c23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.505251] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1668.505251] env[62525]: value = "task-1781712" [ 1668.505251] env[62525]: _type = "Task" [ 1668.505251] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.512743] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781712, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.571167] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.597414] env[62525]: DEBUG nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1668.614428] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.714600] env[62525]: DEBUG nova.scheduler.client.report [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1669.015664] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781712, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.119369] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.151723] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1669.152048] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6175174b-ae79-49c0-b981-b28df23bb75b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.159510] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1669.159510] env[62525]: value = "task-1781713" [ 1669.159510] env[62525]: _type = "Task" [ 1669.159510] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.168733] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781713, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.219795] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.379s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.222139] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.238s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.222563] env[62525]: DEBUG nova.objects.instance [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'resources' on Instance uuid 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.272972] env[62525]: INFO nova.network.neutron [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating port dc247c87-0d2d-47bf-9d66-5e81d9237fa6 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1669.515426] env[62525]: DEBUG oslo_vmware.api [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781712, 'name': PowerOnVM_Task, 'duration_secs': 0.662617} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.515698] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1669.515897] env[62525]: INFO nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Took 8.85 seconds to spawn the instance on the hypervisor. 
[ 1669.516218] env[62525]: DEBUG nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1669.517106] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7077d993-72af-416e-a5fc-74c4b8c8f240 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.669571] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781713, 'name': PowerOffVM_Task, 'duration_secs': 0.383287} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.669792] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1669.670521] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32228059-009a-45fc-b0fc-8415c0ee3ca4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.692232] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73d8df9-3981-40f2-9177-d7f33cb59ec4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.723920] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1669.724201] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afd86fb8-707e-45f3-be3d-47b8ee73d425 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.733464] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1669.733464] env[62525]: value = "task-1781714" [ 1669.733464] env[62525]: _type = "Task" [ 1669.733464] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.740726] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781714, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.021682] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b002e3f-2520-4fd6-a781-f6755d9b2f03 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.033460] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb40f46-e82f-4416-b221-8cf0cf3bf0c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.038807] env[62525]: INFO nova.compute.manager [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Took 16.00 seconds to build instance. [ 1670.067204] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34627d5a-1e30-4700-b66e-91e61d9f3724 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.075770] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae88177-212e-4e96-a935-5c175222b9ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.090874] env[62525]: DEBUG nova.compute.provider_tree [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.244373] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1670.244537] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1670.244779] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.244982] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.245210] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1670.245483] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cb3c224-4aed-4920-8f77-fe4bbdd45901 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.254398] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1670.254582] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1670.255394] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5176ecfd-8e9d-4f08-8eb6-50c38ea88d67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.262583] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1670.262583] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522446f8-a3e1-86cb-9b66-88da8df51f7b" [ 1670.262583] env[62525]: _type = "Task" [ 1670.262583] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.272523] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522446f8-a3e1-86cb-9b66-88da8df51f7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.541246] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c71c1a6c-3b24-424e-b199-65d2a5c308a2 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.512s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.597585] env[62525]: DEBUG nova.scheduler.client.report [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1670.658254] env[62525]: DEBUG nova.compute.manager [req-aa23572a-9104-436f-8985-b26f250e24b5 req-67799714-9906-4ee1-9a32-538526c38d91 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received event network-vif-plugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1670.658443] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa23572a-9104-436f-8985-b26f250e24b5 req-67799714-9906-4ee1-9a32-538526c38d91 service nova] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.658610] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa23572a-9104-436f-8985-b26f250e24b5 req-67799714-9906-4ee1-9a32-538526c38d91 service nova] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.658779] env[62525]: DEBUG oslo_concurrency.lockutils [req-aa23572a-9104-436f-8985-b26f250e24b5 req-67799714-9906-4ee1-9a32-538526c38d91 service nova] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.658964] env[62525]: DEBUG nova.compute.manager [req-aa23572a-9104-436f-8985-b26f250e24b5 req-67799714-9906-4ee1-9a32-538526c38d91 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] No waiting events found dispatching network-vif-plugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1670.659279] env[62525]: WARNING nova.compute.manager [req-aa23572a-9104-436f-8985-b26f250e24b5 req-67799714-9906-4ee1-9a32-538526c38d91 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received unexpected event 
network-vif-plugged-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 for instance with vm_state shelved_offloaded and task_state spawning. [ 1670.739609] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.739681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.739859] env[62525]: DEBUG nova.network.neutron [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1670.774797] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522446f8-a3e1-86cb-9b66-88da8df51f7b, 'name': SearchDatastore_Task, 'duration_secs': 0.009165} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.775754] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92cd8e71-0f1e-4c75-b563-af2dd301f596 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.780854] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1670.780854] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b261cb-4330-130d-ac1b-bbdecad221b5" [ 1670.780854] env[62525]: _type = "Task" [ 1670.780854] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.789765] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b261cb-4330-130d-ac1b-bbdecad221b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.102758] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.110066] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.304s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.111635] env[62525]: INFO nova.compute.claims [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1671.131049] env[62525]: INFO nova.scheduler.client.report [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted allocations for instance 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9 [ 1671.218430] env[62525]: DEBUG nova.compute.manager [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1671.218640] env[62525]: DEBUG nova.compute.manager [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing instance network info cache due to event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1671.218853] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.218999] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.219978] env[62525]: DEBUG nova.network.neutron [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1671.292257] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b261cb-4330-130d-ac1b-bbdecad221b5, 'name': SearchDatastore_Task, 'duration_secs': 0.009501} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.292575] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.292872] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. {{(pid=62525) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1671.294291] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27efdd6e-bf48-4e01-9ab3-bc8e65fc7e96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.305255] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1671.305255] env[62525]: value = "task-1781715" [ 1671.305255] env[62525]: _type = "Task" [ 1671.305255] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.318140] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.463021] env[62525]: DEBUG nova.network.neutron [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [{"id": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "address": "fa:16:3e:94:5d:a4", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc247c87-0d", "ovs_interfaceid": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.578289] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.578653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.642059] env[62525]: DEBUG oslo_concurrency.lockutils [None req-32983b95-4f66-44e0-8a5b-4938791068a3 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "2ad723ff-6540-4bb4-b09e-52e6a9fb12b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.198s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.818063] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 
tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781715, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.945516] env[62525]: DEBUG nova.network.neutron [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updated VIF entry in instance network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1671.945923] env[62525]: DEBUG nova.network.neutron [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.963643] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.994521] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='e91fa3dbef456d2b28514588c34b94d7',container_format='bare',created_at=2024-12-12T00:15:17Z,direct_url=,disk_format='vmdk',id=02017241-feb5-466c-b885-b516496197f8,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1751865425-shelved',owner='af5258cd7a314fc784be2d2e33e6eceb',properties=ImageMetaProps,protected=,size=31590912,status='active',tags=,updated_at=2024-12-12T00:15:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1671.994824] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1671.995179] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1671.995442] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1671.995646] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1671.995847] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1671.996144] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1671.996376] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1671.996602] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1671.997118] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 1671.997118] env[62525]: DEBUG nova.virt.hardware [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1671.998237] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e29709-ef79-4012-859a-cfec868f5b6f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.007288] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0725925-2ca7-4a3a-9fdc-5ba032a4b92d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.022660] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:5d:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc247c87-0d2d-47bf-9d66-5e81d9237fa6', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1672.030380] env[62525]: DEBUG oslo.service.loopingcall [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.030934] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1672.031172] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e047904a-f8fb-43d5-b46f-c6148361e3ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.051228] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1672.051228] env[62525]: value = "task-1781716" [ 1672.051228] env[62525]: _type = "Task" [ 1672.051228] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.059203] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781716, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.082516] env[62525]: DEBUG nova.compute.utils [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1672.315835] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781715, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529728} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.318590] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. [ 1672.319823] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801e136b-0868-4dac-a43e-6cf2f68b895f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.347993] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1672.350735] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b6767fd-8b47-44b8-a460-8fa62df680da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.371018] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1672.371018] env[62525]: value = "task-1781717" [ 1672.371018] env[62525]: _type = "Task" [ 1672.371018] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.380600] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781717, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.402339] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbeeafa3-5453-4991-9913-9abae3e450d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.410099] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b805a7-9b28-4cb3-833c-c4eef5a5d171 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.444169] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba31e093-10dd-4303-b5fd-a0d399254cbf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.450329] env[62525]: DEBUG oslo_concurrency.lockutils [req-8c72716c-218f-4edf-896c-f454b628c037 req-8f574dd7-d445-4ce7-a522-dd59c205cb2e service nova] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.452057] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd400f2-54d8-420d-8e9b-b26099652535 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.466579] env[62525]: DEBUG nova.compute.provider_tree [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1672.560919] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781716, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.585788] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.881627] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781717, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.955159] env[62525]: DEBUG nova.compute.manager [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received event network-changed-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1672.955288] env[62525]: DEBUG nova.compute.manager [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Refreshing instance network info cache due to event network-changed-dc247c87-0d2d-47bf-9d66-5e81d9237fa6. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1672.955505] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] Acquiring lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.955651] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] Acquired lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.955818] env[62525]: DEBUG nova.network.neutron [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Refreshing network info cache for port dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1672.967863] env[62525]: DEBUG nova.scheduler.client.report [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1673.061946] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781716, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.248315] env[62525]: DEBUG nova.compute.manager [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1673.248527] env[62525]: DEBUG nova.compute.manager [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing instance network info cache due to event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1673.248739] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.248882] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.249057] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing network info cache for port 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1673.382426] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.440394] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.440626] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.476970] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.476970] env[62525]: DEBUG nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1673.479746] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.168s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.481273] env[62525]: INFO nova.compute.claims [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1673.562742] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781716, 'name': CreateVM_Task, 'duration_secs': 1.129107} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.562953] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1673.563739] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.563924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.564312] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1673.564567] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9f01cac-8a58-410c-9e22-ebe6f42c577f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.569693] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1673.569693] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5203bcb4-ea20-2b41-f6c2-fffec4a06448" [ 1673.569693] env[62525]: _type = "Task" [ 1673.569693] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.580187] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5203bcb4-ea20-2b41-f6c2-fffec4a06448, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.652783] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.653047] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.653283] env[62525]: INFO nova.compute.manager [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Attaching volume 8adfb2c8-5f82-4a8c-83e6-582042005da0 to /dev/sdb [ 1673.665375] env[62525]: DEBUG nova.network.neutron [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updated VIF entry in instance network info cache for port dc247c87-0d2d-47bf-9d66-5e81d9237fa6. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1673.665709] env[62525]: DEBUG nova.network.neutron [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [{"id": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "address": "fa:16:3e:94:5d:a4", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc247c87-0d", "ovs_interfaceid": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.686635] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0a266a-b54f-4b62-8be9-9ec470073a03 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.693901] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a5db37-7947-433d-bdd9-a4654dc78a36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.708350] env[62525]: DEBUG nova.virt.block_device [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updating existing volume attachment record: 1c80bec3-79dc-450f-9223-d8f96b36f2d8 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1673.885877] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781717, 'name': ReconfigVM_Task, 'duration_secs': 1.228905} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.886228] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1673.887147] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8e8f96-171e-4f02-a563-65c8369f5fcb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.920724] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0de5bc8-f225-4a0b-86e0-57d2c0619619 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.935679] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1673.935679] env[62525]: value = "task-1781721" [ 1673.935679] env[62525]: _type = "Task" [ 1673.935679] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.944428] env[62525]: DEBUG nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1673.947030] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781721, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.967926] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updated VIF entry in instance network info cache for port 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1673.968332] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.981773] env[62525]: DEBUG nova.compute.utils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1673.983857] env[62525]: DEBUG nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1673.984114] env[62525]: DEBUG nova.network.neutron [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1674.048229] env[62525]: DEBUG nova.policy [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0eb16caec01e491a9369f27194a2836a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45e20a581c76424a8f8c2c844f1e04f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1674.083193] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.083425] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Processing image 02017241-feb5-466c-b885-b516496197f8 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.083683] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8/02017241-feb5-466c-b885-b516496197f8.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.083962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "[datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8/02017241-feb5-466c-b885-b516496197f8.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.084110] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.084878] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42346aab-bf5a-4388-9ef1-bb67871f1b2a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.094565] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.094565] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.095168] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a7ac436-81a8-4702-b641-f07e0f77c8d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.101758] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1674.101758] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520273c1-172a-0079-e8a3-857b4eeebf52" [ 1674.101758] env[62525]: _type = "Task" [ 1674.101758] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.111363] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520273c1-172a-0079-e8a3-857b4eeebf52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.168260] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8f1674c-049a-4c3b-b985-3a4e2a9cfd40 req-3c8f543d-58a8-4089-932a-72a186484528 service nova] Releasing lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.403213] env[62525]: DEBUG nova.network.neutron [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Successfully created port: 3edc6d99-8711-4b37-869a-4e1238dc7a5a {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1674.451433] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781721, 'name': ReconfigVM_Task, 'duration_secs': 0.169448} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.454111] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1674.455406] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be1d1313-df10-4fb0-8f3e-a5eddc10b8e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.461651] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1674.461651] env[62525]: value = "task-1781722" [ 1674.461651] env[62525]: _type = "Task" [ 1674.461651] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.471219] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.471657] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Releasing lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.472777] env[62525]: DEBUG nova.compute.manager [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1674.472777] env[62525]: DEBUG nova.compute.manager [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing instance network info cache due to event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1674.472777] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.473106] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.473215] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing network info cache for port 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1674.475047] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781722, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.488047] env[62525]: DEBUG nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1674.615962] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Preparing fetch location {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1674.616231] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Fetch image to [datastore1] OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34/OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34.vmdk {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1674.616449] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Downloading stream optimized image 02017241-feb5-466c-b885-b516496197f8 to [datastore1] OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34/OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34.vmdk on the data store datastore1 as vApp {{(pid=62525) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1674.616631] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Downloading image file data 
02017241-feb5-466c-b885-b516496197f8 to the ESX as VM named 'OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34' {{(pid=62525) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1674.693246] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1674.693246] env[62525]: value = "resgroup-9" [ 1674.693246] env[62525]: _type = "ResourcePool" [ 1674.693246] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1674.693566] env[62525]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-6173e5af-3a40-4ea5-bcc1-2a577f38fe18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.717721] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lease: (returnval){ [ 1674.717721] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52226310-4d9a-f6b1-1cf5-fb72492f48cc" [ 1674.717721] env[62525]: _type = "HttpNfcLease" [ 1674.717721] env[62525]: } obtained for vApp import into resource pool (val){ [ 1674.717721] env[62525]: value = "resgroup-9" [ 1674.717721] env[62525]: _type = "ResourcePool" [ 1674.717721] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1674.718063] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the lease: (returnval){ [ 1674.718063] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52226310-4d9a-f6b1-1cf5-fb72492f48cc" [ 1674.718063] env[62525]: _type = "HttpNfcLease" [ 1674.718063] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1674.726878] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1674.726878] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52226310-4d9a-f6b1-1cf5-fb72492f48cc" [ 1674.726878] env[62525]: _type = "HttpNfcLease" [ 1674.726878] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1674.774900] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e858fc4-9098-4d46-8828-e3b417eaeb05 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.783835] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4138606-f6a8-4aa4-b3b2-95cc81393c56 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.814459] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0078904e-31dc-4983-b6c5-68f01bdf037d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.821722] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec261162-d9aa-4fa0-9d60-6597989d7e64 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.834669] env[62525]: DEBUG nova.compute.provider_tree [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1674.903631] env[62525]: INFO nova.compute.manager [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Rebuilding instance [ 1674.940283] env[62525]: DEBUG nova.compute.manager [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1674.941187] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a043293-64ab-4654-8fe8-29974f71ff1d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.970467] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781722, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.228226] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1675.228226] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52226310-4d9a-f6b1-1cf5-fb72492f48cc" [ 1675.228226] env[62525]: _type = "HttpNfcLease" [ 1675.228226] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1675.282525] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updated VIF entry in instance network info cache for port 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1675.283167] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.338103] env[62525]: DEBUG nova.scheduler.client.report [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1675.451724] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1675.452094] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3dc107f-4427-4158-b21e-341f10d25636 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.459330] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1675.459330] env[62525]: value = "task-1781724" [ 1675.459330] env[62525]: _type = "Task" [ 1675.459330] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.471054] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.474062] env[62525]: DEBUG oslo_vmware.api [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781722, 'name': PowerOnVM_Task, 'duration_secs': 0.915802} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.474321] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1675.477294] env[62525]: DEBUG nova.compute.manager [None req-ce118aac-d1b5-4f23-b80f-65fa477e5245 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1675.478063] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2c382a-e84a-40af-af00-23e5f32fdc1a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.497411] env[62525]: DEBUG nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1675.524070] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1675.524386] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1675.524590] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.524814] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1675.525030] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.525288] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1675.525700] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1675.525963] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1675.526237] env[62525]: DEBUG nova.virt.hardware [None 
req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1675.526483] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1675.526739] env[62525]: DEBUG nova.virt.hardware [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1675.527882] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb1339a-96b6-4ef7-8f82-9c28939b2397 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.537814] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5b46ac-1c79-4af3-9591-6eeff85fcbe4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.728901] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1675.728901] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52226310-4d9a-f6b1-1cf5-fb72492f48cc" [ 1675.728901] env[62525]: _type = "HttpNfcLease" [ 1675.728901] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1675.729241] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1675.729241] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52226310-4d9a-f6b1-1cf5-fb72492f48cc" [ 1675.729241] env[62525]: _type = "HttpNfcLease" [ 1675.729241] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1675.729958] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f3fe96-86d1-4b52-8452-2c48eeeed210 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.737015] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522599b6-a8ce-dd3b-eb2e-be8f6a9e24ed/disk-0.vmdk from lease info. 
{{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1675.737206] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating HTTP connection to write to file with size = 31590912 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522599b6-a8ce-dd3b-eb2e-be8f6a9e24ed/disk-0.vmdk. {{(pid=62525) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1675.794363] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Releasing lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.794635] env[62525]: DEBUG nova.compute.manager [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1675.794804] env[62525]: DEBUG nova.compute.manager [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing instance network info cache due to event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1675.795032] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.795196] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.795367] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1675.801755] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-274649cd-fdc3-4132-92c1-db716e43fe5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.829164] env[62525]: DEBUG nova.compute.manager [req-6d1428b0-6ae6-42c2-9012-ed0adf8732a9 req-f1c0070d-4878-436b-bd30-6d730889a2f1 service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Received event network-vif-plugged-3edc6d99-8711-4b37-869a-4e1238dc7a5a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1675.829479] env[62525]: DEBUG oslo_concurrency.lockutils [req-6d1428b0-6ae6-42c2-9012-ed0adf8732a9 req-f1c0070d-4878-436b-bd30-6d730889a2f1 service nova] Acquiring lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.829727] env[62525]: DEBUG oslo_concurrency.lockutils [req-6d1428b0-6ae6-42c2-9012-ed0adf8732a9 req-f1c0070d-4878-436b-bd30-6d730889a2f1 service nova] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.829926] env[62525]: DEBUG oslo_concurrency.lockutils [req-6d1428b0-6ae6-42c2-9012-ed0adf8732a9 req-f1c0070d-4878-436b-bd30-6d730889a2f1 service nova] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.830137] env[62525]: DEBUG nova.compute.manager [req-6d1428b0-6ae6-42c2-9012-ed0adf8732a9 req-f1c0070d-4878-436b-bd30-6d730889a2f1 service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] No waiting events found dispatching network-vif-plugged-3edc6d99-8711-4b37-869a-4e1238dc7a5a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1675.830343] env[62525]: WARNING nova.compute.manager [req-6d1428b0-6ae6-42c2-9012-ed0adf8732a9 req-f1c0070d-4878-436b-bd30-6d730889a2f1 service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Received unexpected event network-vif-plugged-3edc6d99-8711-4b37-869a-4e1238dc7a5a for instance with vm_state building and task_state spawning. [ 1675.843195] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.843742] env[62525]: DEBUG nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1675.847932] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.791s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.849397] env[62525]: INFO nova.compute.claims [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1675.926016] env[62525]: DEBUG nova.network.neutron [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Successfully updated port: 3edc6d99-8711-4b37-869a-4e1238dc7a5a {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1675.971507] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781724, 'name': PowerOffVM_Task, 'duration_secs': 0.228196} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.971792] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1675.972044] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1675.972841] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93025eeb-c624-49cf-8293-938464af16ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.981114] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1675.981259] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48cc199f-df29-423d-a8f3-f58d975205de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.066723] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Unregistered the VM {{(pid=62525) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1676.066723] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1676.066723] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleting the datastore file [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.066723] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8aef7b2f-4676-4bd3-9e75-9726d125e2eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.074570] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1676.074570] env[62525]: value = "task-1781727" [ 1676.074570] env[62525]: _type = "Task" [ 1676.074570] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.084371] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781727, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.360039] env[62525]: DEBUG nova.compute.utils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1676.361708] env[62525]: DEBUG nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1676.361878] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1676.427162] env[62525]: DEBUG nova.policy [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18bd37c3a74a4873a12092f31ccb07f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeeaeb287b194ebfb0c57e33ef138187', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1676.431832] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.431992] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.432180] env[62525]: DEBUG nova.network.neutron [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1676.557884] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updated VIF entry in instance network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1676.558366] env[62525]: DEBUG nova.network.neutron [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.588273] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781727, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158194} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.588456] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1676.588688] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1676.588969] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1676.870109] env[62525]: DEBUG nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1676.946348] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Successfully created port: 2ac247b9-f66a-46ac-9bea-2b9c1870ba66 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1676.979903] env[62525]: DEBUG nova.network.neutron [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1677.066230] env[62525]: DEBUG oslo_concurrency.lockutils [req-235533d5-6043-4571-bbb5-1a4a331b1c0a req-ccf2788b-dd50-484e-839a-1baf63616bad service nova] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.103360] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Completed reading data from the image iterator. {{(pid=62525) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1677.103566] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522599b6-a8ce-dd3b-eb2e-be8f6a9e24ed/disk-0.vmdk. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1677.103997] env[62525]: INFO nova.compute.manager [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Unrescuing [ 1677.104295] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.104459] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquired lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.104636] env[62525]: DEBUG nova.network.neutron [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1677.106400] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c08be32-3f7d-4392-b6f9-288f541c77f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.114828] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522599b6-a8ce-dd3b-eb2e-be8f6a9e24ed/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1677.115102] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522599b6-a8ce-dd3b-eb2e-be8f6a9e24ed/disk-0.vmdk. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1677.115862] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-25cbc604-a582-4597-9da6-e44d19e4c439 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.146348] env[62525]: DEBUG nova.network.neutron [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance_info_cache with network_info: [{"id": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "address": "fa:16:3e:5e:3a:18", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edc6d99-87", "ovs_interfaceid": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.180477] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c237605-a718-4a9c-a9fc-55387c5bbbe0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.188220] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abb8708-5ad4-4304-88a9-f7ee3fadfa4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.220515] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7e7e45-7929-4698-ac35-2012ed188b33 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.228580] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e4bc08-b28d-478c-8af7-f8fac5c2184c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.242226] env[62525]: DEBUG nova.compute.provider_tree [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.495756] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 
tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Successfully created port: 5b882b83-1f22-4eb3-845f-766cac71d2de {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1677.581107] env[62525]: DEBUG oslo_vmware.rw_handles [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522599b6-a8ce-dd3b-eb2e-be8f6a9e24ed/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1677.581391] env[62525]: INFO nova.virt.vmwareapi.images [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Downloaded image file data 02017241-feb5-466c-b885-b516496197f8 [ 1677.582277] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44661df-0d0f-43a3-85cd-7a619b37f96d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.602743] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76bfd6e5-a9dc-4287-a11a-9ea2cc64e48d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.631098] env[62525]: INFO nova.virt.vmwareapi.images [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] The imported VM was unregistered [ 1677.632926] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Caching image {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1677.633252] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Creating directory with path [datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1677.633986] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb4db32f-ac43-4aeb-a3a4-55986fc3cc27 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.646253] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1677.646496] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1677.646650] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1677.646820] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1677.646960] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1677.647131] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1677.647352] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1677.647527] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1677.647792] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1677.647987] env[62525]: DEBUG nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1677.648186] env[62525]: DEBUG 
nova.virt.hardware [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1677.648656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.648938] env[62525]: DEBUG nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Instance network_info: |[{"id": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "address": "fa:16:3e:5e:3a:18", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edc6d99-87", "ovs_interfaceid": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1677.649753] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc6caf1-6a3f-4072-a608-70ef0d32461f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.655579] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:3a:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3edc6d99-8711-4b37-869a-4e1238dc7a5a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1677.662890] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Creating folder: Project (45e20a581c76424a8f8c2c844f1e04f9). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1677.663647] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Created directory with path [datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1677.663833] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34/OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34.vmdk to [datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8/02017241-feb5-466c-b885-b516496197f8.vmdk. {{(pid=62525) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1677.664079] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0cef80a-b9f7-4fb0-83ad-8bb2e289c2e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.666103] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-649539c0-ea70-4d2e-900f-146dd263ea1f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.671835] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f4e92c-2ebd-4718-8c5f-5f2badd39503 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.677592] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1677.677592] env[62525]: value = "task-1781730" [ 1677.677592] env[62525]: _type = "Task" [ 1677.677592] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.689902] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:c0:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d3644a6-dbaa-4a30-930a-53beadf8704a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1677.698534] env[62525]: DEBUG oslo.service.loopingcall [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1677.698869] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Created folder: Project (45e20a581c76424a8f8c2c844f1e04f9) in parent group-v369553. [ 1677.699056] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Creating folder: Instances. Parent ref: group-v369798. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1677.699763] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1677.702018] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6323693b-1ff5-4102-9556-75ef16dc7e66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.705145] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83597f78-49c0-401a-9b7a-250981a80b69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.721415] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781730, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.730403] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1677.730403] env[62525]: value = "task-1781732" [ 1677.730403] env[62525]: _type = "Task" [ 1677.730403] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.734292] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Created folder: Instances in parent group-v369798. [ 1677.738025] env[62525]: DEBUG oslo.service.loopingcall [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1677.738025] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1677.738025] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3160cc2-491d-4682-a7a2-fad4b14eceeb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.758531] env[62525]: DEBUG nova.scheduler.client.report [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1677.761851] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781732, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.769839] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1677.769839] env[62525]: value = "task-1781733" [ 1677.769839] env[62525]: _type = "Task" [ 1677.769839] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.778133] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781733, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.879693] env[62525]: DEBUG nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1677.908931] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1677.908931] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1677.908931] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1677.909249] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1677.909249] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1677.909628] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1677.909853] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1677.909853] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1677.910021] env[62525]: DEBUG nova.virt.hardware [None 
req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1677.910221] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1677.910407] env[62525]: DEBUG nova.virt.hardware [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1677.911696] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93a316f-bd73-48dc-8278-787af11f0d6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.915567] env[62525]: DEBUG nova.network.neutron [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.923343] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7474ec0d-ec9d-4f9b-985d-23af1119d0aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.012283] env[62525]: DEBUG nova.compute.manager [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Received event network-changed-3edc6d99-8711-4b37-869a-4e1238dc7a5a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1678.012542] env[62525]: DEBUG 
nova.compute.manager [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Refreshing instance network info cache due to event network-changed-3edc6d99-8711-4b37-869a-4e1238dc7a5a. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1678.012782] env[62525]: DEBUG oslo_concurrency.lockutils [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] Acquiring lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.013164] env[62525]: DEBUG oslo_concurrency.lockutils [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] Acquired lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.013164] env[62525]: DEBUG nova.network.neutron [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Refreshing network info cache for port 3edc6d99-8711-4b37-869a-4e1238dc7a5a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1678.187790] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781730, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.241168] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781732, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.257560] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1678.257881] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369796', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'name': 'volume-8adfb2c8-5f82-4a8c-83e6-582042005da0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '82443424-6071-44b3-bd9a-f92a1a650f27', 'attached_at': '', 'detached_at': '', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'serial': '8adfb2c8-5f82-4a8c-83e6-582042005da0'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1678.258784] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8625a65-6fd2-40f4-b8c7-d03604d69b66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.277176] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.278759] env[62525]: DEBUG nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1678.283030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.986s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.284072] env[62525]: INFO nova.compute.claims [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1678.291195] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e94981-1bad-4413-be11-8934c158e8cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.300103] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781733, 'name': CreateVM_Task, 'duration_secs': 0.473189} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.317134] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1678.325681] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] volume-8adfb2c8-5f82-4a8c-83e6-582042005da0/volume-8adfb2c8-5f82-4a8c-83e6-582042005da0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1678.327643] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.327824] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.328195] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1678.328457] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c9c8dae-2284-49a6-b10c-8991ec87571f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.341838] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60b4d690-d39a-4dbe-8459-c15018f15d21 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.348396] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1678.348396] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52509b22-7529-2c79-055b-cb7474225641" [ 1678.348396] env[62525]: _type = "Task" [ 1678.348396] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.353273] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1678.353273] env[62525]: value = "task-1781734" [ 1678.353273] env[62525]: _type = "Task" [ 1678.353273] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.360216] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52509b22-7529-2c79-055b-cb7474225641, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.366466] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781734, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.418604] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Releasing lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.419288] env[62525]: DEBUG nova.objects.instance [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lazy-loading 'flavor' on Instance uuid 0067de08-6708-4c7c-a83a-ed9df193d5cd {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1678.689870] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781730, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.733959] env[62525]: DEBUG nova.network.neutron [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updated VIF entry in instance network info cache for port 3edc6d99-8711-4b37-869a-4e1238dc7a5a. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1678.734368] env[62525]: DEBUG nova.network.neutron [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance_info_cache with network_info: [{"id": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "address": "fa:16:3e:5e:3a:18", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edc6d99-87", "ovs_interfaceid": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.747055] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781732, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.784182] env[62525]: DEBUG nova.compute.utils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1678.785710] env[62525]: DEBUG nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1678.785883] env[62525]: DEBUG nova.network.neutron [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1678.844071] env[62525]: DEBUG nova.policy [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd906b2c1755b466991ea7d22fa90df8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e48ef541f0e4b689d5d86782efb8db5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1678.861545] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52509b22-7529-2c79-055b-cb7474225641, 'name': SearchDatastore_Task, 'duration_secs': 0.08888} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.862129] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.862281] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1678.862519] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.862669] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.862852] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1678.863150] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-942288f2-836d-4fb6-a032-11a102129704 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.868554] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.884722] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1678.885021] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1678.885848] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95df3a6e-b692-49cf-aaf3-71af272ac82d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.893438] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1678.893438] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5238ace9-d30b-8a2e-93e9-280d62785a10" [ 1678.893438] env[62525]: _type = "Task" [ 1678.893438] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.902897] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5238ace9-d30b-8a2e-93e9-280d62785a10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.924958] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce21a584-b59c-450a-8660-0c6b32c3fbde {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.951511] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1678.951874] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0b8371e-6ac5-465d-b3e5-328e5094af3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.960086] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1678.960086] env[62525]: value = "task-1781735" [ 1678.960086] env[62525]: _type = "Task" [ 1678.960086] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.969450] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781735, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.132107] env[62525]: DEBUG nova.network.neutron [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Successfully created port: a9ec3613-8b89-413b-831b-896e679be20d {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1679.190989] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781730, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.240934] env[62525]: DEBUG oslo_concurrency.lockutils [req-61fb2636-ca7f-4b9b-8261-0db83ca6a600 req-5ace56a2-b38c-43de-a206-f6eacc68e1ea service nova] Releasing lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.245882] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781732, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.289437] env[62525]: DEBUG nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1679.365511] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.407626] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5238ace9-d30b-8a2e-93e9-280d62785a10, 'name': SearchDatastore_Task, 'duration_secs': 0.098555} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.408882] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32928121-eaa4-4ec7-b8a4-d400814c00ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.417134] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1679.417134] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cb24e0-a0be-8b47-ccf9-c8b4e84f182f" [ 1679.417134] env[62525]: _type = "Task" [ 1679.417134] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.428008] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cb24e0-a0be-8b47-ccf9-c8b4e84f182f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.470927] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781735, 'name': PowerOffVM_Task, 'duration_secs': 0.270397} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.470927] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1679.477036] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfiguring VM instance instance-00000037 to detach disk 2002 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1679.480479] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-539b0888-5c8e-459e-8d57-6431b6a94381 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.497318] env[62525]: DEBUG nova.compute.manager [req-f74c787e-efcd-4cb4-aa58-7c5e7f0ca782 req-df6be869-814e-4b9d-805e-cf30850c4eef service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received event network-vif-plugged-2ac247b9-f66a-46ac-9bea-2b9c1870ba66 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1679.497543] env[62525]: DEBUG oslo_concurrency.lockutils [req-f74c787e-efcd-4cb4-aa58-7c5e7f0ca782 req-df6be869-814e-4b9d-805e-cf30850c4eef service nova] Acquiring lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.497779] env[62525]: DEBUG oslo_concurrency.lockutils [req-f74c787e-efcd-4cb4-aa58-7c5e7f0ca782 req-df6be869-814e-4b9d-805e-cf30850c4eef service nova] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.497959] env[62525]: DEBUG oslo_concurrency.lockutils [req-f74c787e-efcd-4cb4-aa58-7c5e7f0ca782 req-df6be869-814e-4b9d-805e-cf30850c4eef service nova] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.498189] env[62525]: DEBUG nova.compute.manager [req-f74c787e-efcd-4cb4-aa58-7c5e7f0ca782 req-df6be869-814e-4b9d-805e-cf30850c4eef service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] No waiting events found dispatching network-vif-plugged-2ac247b9-f66a-46ac-9bea-2b9c1870ba66 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1679.498389] env[62525]: WARNING nova.compute.manager [req-f74c787e-efcd-4cb4-aa58-7c5e7f0ca782 req-df6be869-814e-4b9d-805e-cf30850c4eef service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received unexpected event network-vif-plugged-2ac247b9-f66a-46ac-9bea-2b9c1870ba66 for instance with vm_state building and task_state spawning. 
[ 1679.504889] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1679.504889] env[62525]: value = "task-1781736" [ 1679.504889] env[62525]: _type = "Task" [ 1679.504889] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.518669] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781736, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.638250] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e378247-a5b5-48e7-8961-ec8e69e4e035 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.647532] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129c4c83-ebce-4608-b857-4ecc721edd23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.687908] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa92883-2377-46b0-9f68-0d5dd366066e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.696915] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781730, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.700564] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74dc898-ce2b-4287-9e08-94f723cd4b07 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.716571] env[62525]: DEBUG nova.compute.provider_tree [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.747197] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781732, 'name': CreateVM_Task, 'duration_secs': 1.821939} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.747466] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1679.748644] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.748644] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.748796] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1679.749094] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d518156a-e572-4ca6-82c4-7b0ffbbfd7d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.754506] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1679.754506] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]523ec533-349e-f459-b03f-685bb6b51d97" [ 1679.754506] env[62525]: _type = "Task" [ 1679.754506] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.763113] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523ec533-349e-f459-b03f-685bb6b51d97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.866169] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.928398] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cb24e0-a0be-8b47-ccf9-c8b4e84f182f, 'name': SearchDatastore_Task, 'duration_secs': 0.112796} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.928720] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.929029] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 462bc19d-1eaa-4c57-8ebb-412a97614f03/462bc19d-1eaa-4c57-8ebb-412a97614f03.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1679.929330] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31a77f5d-54e7-474a-8fb0-89daf543f70c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.936926] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1679.936926] env[62525]: value = "task-1781737" [ 1679.936926] env[62525]: _type = "Task" [ 1679.936926] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.945622] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781737, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.004592] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Successfully updated port: 2ac247b9-f66a-46ac-9bea-2b9c1870ba66 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1680.018064] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781736, 'name': ReconfigVM_Task, 'duration_secs': 0.271387} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.018064] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfigured VM instance instance-00000037 to detach disk 2002 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1680.018064] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1680.018064] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d223cea-320d-4909-a27f-3834b2a9cff0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.025850] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1680.025850] env[62525]: value = "task-1781738" [ 1680.025850] env[62525]: _type = "Task" [ 1680.025850] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.036539] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.086168] env[62525]: DEBUG nova.compute.manager [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received event network-changed-2ac247b9-f66a-46ac-9bea-2b9c1870ba66 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1680.086570] env[62525]: DEBUG nova.compute.manager [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Refreshing instance network info cache due to event network-changed-2ac247b9-f66a-46ac-9bea-2b9c1870ba66. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1680.086998] env[62525]: DEBUG oslo_concurrency.lockutils [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] Acquiring lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.087176] env[62525]: DEBUG oslo_concurrency.lockutils [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] Acquired lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.087481] env[62525]: DEBUG nova.network.neutron [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Refreshing network info cache for port 2ac247b9-f66a-46ac-9bea-2b9c1870ba66 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1680.193735] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781730, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.220122] env[62525]: DEBUG nova.scheduler.client.report [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1680.265760] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]523ec533-349e-f459-b03f-685bb6b51d97, 'name': SearchDatastore_Task, 'duration_secs': 0.087503} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.266145] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.266611] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1680.266895] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.267134] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.267311] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1680.267627] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45d1d5d1-08d2-426e-807b-1c4b1f07181a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.276965] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1680.277289] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1680.278034] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca3dc166-cdd4-4403-9fae-d193253d1173 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.284310] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1680.284310] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b77f39-ffbe-5b34-b69e-f2298dedfd9a" [ 1680.284310] env[62525]: _type = "Task" [ 1680.284310] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.292419] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b77f39-ffbe-5b34-b69e-f2298dedfd9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.298706] env[62525]: DEBUG nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1680.328727] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1680.329455] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1680.329455] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1680.329455] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 
tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1680.329589] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1680.329642] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1680.329852] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1680.330017] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1680.330226] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1680.330414] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1680.330595] env[62525]: DEBUG nova.virt.hardware [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1680.331569] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824bca41-a432-4382-bda0-54be435eb4d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.339746] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d00f8b6-9d25-453d-9cb9-b339d7db9926 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.364997] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781734, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.448997] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781737, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.536345] env[62525]: DEBUG oslo_vmware.api [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781738, 'name': PowerOnVM_Task, 'duration_secs': 0.442564} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.536679] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1680.536924] env[62525]: DEBUG nova.compute.manager [None req-b914eea4-0077-4af0-bed3-cb1e4206f85b tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1680.537812] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9237f4d-dcd5-4d60-b464-c00a827f4d2a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.614543] env[62525]: DEBUG nova.compute.manager [req-24509b8b-cbab-4bab-bdaa-5ece3ad03f67 req-36824625-4f07-487e-adf8-c5527e5f4731 service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Received event network-vif-plugged-a9ec3613-8b89-413b-831b-896e679be20d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1680.614543] env[62525]: DEBUG oslo_concurrency.lockutils [req-24509b8b-cbab-4bab-bdaa-5ece3ad03f67 req-36824625-4f07-487e-adf8-c5527e5f4731 service nova] Acquiring lock "73156235-1b13-4fda-8957-ed8cd88ceb43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.614543] env[62525]: DEBUG oslo_concurrency.lockutils [req-24509b8b-cbab-4bab-bdaa-5ece3ad03f67 req-36824625-4f07-487e-adf8-c5527e5f4731 service nova] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.614543] env[62525]: DEBUG oslo_concurrency.lockutils [req-24509b8b-cbab-4bab-bdaa-5ece3ad03f67 req-36824625-4f07-487e-adf8-c5527e5f4731 service nova] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.614543] env[62525]: DEBUG nova.compute.manager [req-24509b8b-cbab-4bab-bdaa-5ece3ad03f67 
req-36824625-4f07-487e-adf8-c5527e5f4731 service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] No waiting events found dispatching network-vif-plugged-a9ec3613-8b89-413b-831b-896e679be20d {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1680.614947] env[62525]: WARNING nova.compute.manager [req-24509b8b-cbab-4bab-bdaa-5ece3ad03f67 req-36824625-4f07-487e-adf8-c5527e5f4731 service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Received unexpected event network-vif-plugged-a9ec3613-8b89-413b-831b-896e679be20d for instance with vm_state building and task_state spawning. [ 1680.643166] env[62525]: DEBUG nova.network.neutron [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1680.705024] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781730, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.641497} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.705024] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34/OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34.vmdk to [datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8/02017241-feb5-466c-b885-b516496197f8.vmdk. [ 1680.705024] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Cleaning up location [datastore1] OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1680.705024] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_de303810-ee41-4d43-b622-c96f55b02f34 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1680.705024] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdf4fe99-b359-4737-9047-76bcadd79be5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.720146] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1680.720146] env[62525]: value = "task-1781739" [ 1680.720146] env[62525]: _type = "Task" [ 1680.720146] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.728623] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.728623] env[62525]: DEBUG nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1680.735188] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.164s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.735633] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.735960] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1680.736458] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.617s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.738674] env[62525]: INFO nova.compute.claims [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1680.743123] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a796ddf-47d5-418c-be1b-0cc7a04712da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.755102] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781739, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.762021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3438559a-2957-4dc6-acd0-506cae8c5973 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.779408] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b80bc7-0de1-4f52-9695-f94c6ddecd46 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.792058] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa4fbd7-eec7-4ac8-807d-85c477a0f061 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.800286] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b77f39-ffbe-5b34-b69e-f2298dedfd9a, 'name': SearchDatastore_Task, 'duration_secs': 0.00934} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.801709] env[62525]: DEBUG nova.network.neutron [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.803213] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6b36750-bda5-4102-a494-60036eb74cbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.830368] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179079MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1680.830519] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.831498] env[62525]: DEBUG nova.network.neutron [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Successfully updated port: a9ec3613-8b89-413b-831b-896e679be20d {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1680.836193] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1680.836193] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5219e5e0-5c65-21d0-268f-eab1c501102d" [ 1680.836193] env[62525]: _type = "Task" [ 1680.836193] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.845314] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5219e5e0-5c65-21d0-268f-eab1c501102d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.865677] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781734, 'name': ReconfigVM_Task, 'duration_secs': 2.117527} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.865938] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Reconfigured VM instance instance-00000049 to attach disk [datastore1] volume-8adfb2c8-5f82-4a8c-83e6-582042005da0/volume-8adfb2c8-5f82-4a8c-83e6-582042005da0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1680.870765] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5f6c24f-1fc8-44dd-afed-4f9a33feed2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.886845] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1680.886845] env[62525]: value = "task-1781740" [ 1680.886845] env[62525]: _type = "Task" [ 1680.886845] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.896889] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781740, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.947913] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781737, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.840171} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.948229] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 462bc19d-1eaa-4c57-8ebb-412a97614f03/462bc19d-1eaa-4c57-8ebb-412a97614f03.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1680.948466] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1680.948722] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca5e3df2-ac58-4d9a-844a-5a0757411f78 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.957298] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1680.957298] env[62525]: value = "task-1781741" [ 1680.957298] env[62525]: _type = "Task" [ 1680.957298] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.965760] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781741, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.235527] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075653} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.235807] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1681.235972] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "[datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8/02017241-feb5-466c-b885-b516496197f8.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.236237] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8/02017241-feb5-466c-b885-b516496197f8.vmdk to [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1681.236508] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6d26a3c-eada-4610-97ac-cd21075aabaa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.243710] env[62525]: DEBUG nova.compute.utils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1681.247213] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1681.247213] env[62525]: value = "task-1781742" [ 1681.247213] env[62525]: _type = "Task" [ 1681.247213] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.247406] env[62525]: DEBUG nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1681.247479] env[62525]: DEBUG nova.network.neutron [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1681.257663] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781742, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.297520] env[62525]: DEBUG nova.policy [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef8d66aa33c2442ea266c3b687ba2d6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77eae31161444518aadfe27dd51c2081', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1681.330136] env[62525]: DEBUG oslo_concurrency.lockutils [req-df156ffc-9979-407a-bea1-b1705c852cdd req-ee5c2a50-8059-4929-a5e9-7d69359ad65e service nova] Releasing lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.333921] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "refresh_cache-73156235-1b13-4fda-8957-ed8cd88ceb43" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.334087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired lock "refresh_cache-73156235-1b13-4fda-8957-ed8cd88ceb43" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.334227] env[62525]: DEBUG nova.network.neutron [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1681.347038] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5219e5e0-5c65-21d0-268f-eab1c501102d, 'name': SearchDatastore_Task, 'duration_secs': 0.016185} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.347311] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.347571] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1681.347834] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6bb8c79-22c0-4612-86c4-a1e0bd4599e3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.355599] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1681.355599] env[62525]: value = "task-1781743" [ 1681.355599] env[62525]: _type = "Task" [ 1681.355599] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.363793] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.396741] env[62525]: DEBUG oslo_vmware.api [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781740, 'name': ReconfigVM_Task, 'duration_secs': 0.148555} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.397114] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369796', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'name': 'volume-8adfb2c8-5f82-4a8c-83e6-582042005da0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '82443424-6071-44b3-bd9a-f92a1a650f27', 'attached_at': '', 'detached_at': '', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'serial': '8adfb2c8-5f82-4a8c-83e6-582042005da0'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1681.468497] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781741, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091606} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.468928] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1681.469965] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce46291b-ea54-4df1-b801-1aab81f12697 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.498539] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 462bc19d-1eaa-4c57-8ebb-412a97614f03/462bc19d-1eaa-4c57-8ebb-412a97614f03.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1681.499022] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fba45cd8-09ea-4eb2-bc43-2bb3a6548cf5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.522761] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1681.522761] env[62525]: value = "task-1781744" [ 1681.522761] env[62525]: _type = "Task" [ 1681.522761] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.539211] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781744, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.747879] env[62525]: DEBUG nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1681.772358] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781742, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.863958] env[62525]: DEBUG nova.network.neutron [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Successfully created port: 0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1681.875189] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.876397] env[62525]: DEBUG nova.network.neutron [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1682.037466] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781744, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.056471] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddce252-55e2-46fd-825c-702ef6d4a66b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.065351] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4db7d5e-0822-4f83-b404-6c5a0acc6c85 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.104183] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85527975-8828-488d-be51-f10f754f78a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.112615] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44cd664-ddf8-45a0-8cb7-894972fb74af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.128365] env[62525]: DEBUG nova.compute.provider_tree [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1682.219232] env[62525]: DEBUG nova.network.neutron [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Updating instance_info_cache with network_info: [{"id": "a9ec3613-8b89-413b-831b-896e679be20d", "address": "fa:16:3e:72:8b:20", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9ec3613-8b", "ovs_interfaceid": "a9ec3613-8b89-413b-831b-896e679be20d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.279180] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781742, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.368581] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781743, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.450781] env[62525]: DEBUG nova.objects.instance [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'flavor' on Instance uuid 82443424-6071-44b3-bd9a-f92a1a650f27 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1682.534194] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781744, 'name': ReconfigVM_Task, 'duration_secs': 0.603582} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.534723] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 462bc19d-1eaa-4c57-8ebb-412a97614f03/462bc19d-1eaa-4c57-8ebb-412a97614f03.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1682.535542] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e18361a-2d01-4c34-ac26-4b392f5c79b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.543349] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1682.543349] env[62525]: value = "task-1781745" [ 1682.543349] env[62525]: _type = "Task" [ 1682.543349] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.555259] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781745, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.593199] env[62525]: DEBUG nova.compute.manager [req-6bc01217-79df-4fd3-98e7-1816cbe57e3c req-541113ba-2f04-4cea-9d81-b2b0477575fb service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received event network-vif-plugged-5b882b83-1f22-4eb3-845f-766cac71d2de {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1682.597026] env[62525]: DEBUG oslo_concurrency.lockutils [req-6bc01217-79df-4fd3-98e7-1816cbe57e3c req-541113ba-2f04-4cea-9d81-b2b0477575fb service nova] Acquiring lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.597026] env[62525]: DEBUG oslo_concurrency.lockutils [req-6bc01217-79df-4fd3-98e7-1816cbe57e3c req-541113ba-2f04-4cea-9d81-b2b0477575fb service nova] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.597026] env[62525]: DEBUG oslo_concurrency.lockutils [req-6bc01217-79df-4fd3-98e7-1816cbe57e3c req-541113ba-2f04-4cea-9d81-b2b0477575fb service nova] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.597026] env[62525]: DEBUG nova.compute.manager [req-6bc01217-79df-4fd3-98e7-1816cbe57e3c req-541113ba-2f04-4cea-9d81-b2b0477575fb service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] No waiting events found dispatching network-vif-plugged-5b882b83-1f22-4eb3-845f-766cac71d2de {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1682.597026] env[62525]: WARNING nova.compute.manager [req-6bc01217-79df-4fd3-98e7-1816cbe57e3c req-541113ba-2f04-4cea-9d81-b2b0477575fb service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received unexpected event network-vif-plugged-5b882b83-1f22-4eb3-845f-766cac71d2de for instance with vm_state building and task_state spawning. 
[ 1682.633346] env[62525]: DEBUG nova.scheduler.client.report [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1682.722118] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Releasing lock "refresh_cache-73156235-1b13-4fda-8957-ed8cd88ceb43" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1682.722473] env[62525]: DEBUG nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Instance network_info: |[{"id": "a9ec3613-8b89-413b-831b-896e679be20d", "address": "fa:16:3e:72:8b:20", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9ec3613-8b", "ovs_interfaceid": "a9ec3613-8b89-413b-831b-896e679be20d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1682.722985] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:8b:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9ec3613-8b89-413b-831b-896e679be20d', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1682.732167] env[62525]: DEBUG oslo.service.loopingcall [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1682.733013] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1682.733300] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-507468bd-9056-4eca-993b-f5e8c69e8c99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.755541] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1682.755541] env[62525]: value = "task-1781746" [ 1682.755541] env[62525]: _type = "Task" [ 1682.755541] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.767023] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781746, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.769830] env[62525]: DEBUG nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1682.775810] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781742, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.801523] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1682.801853] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1682.802044] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1682.802444] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1682.802607] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1682.802760] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1682.802972] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1682.803146] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1682.803314] env[62525]: DEBUG nova.virt.hardware [None 
req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1682.803479] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1682.803647] env[62525]: DEBUG nova.virt.hardware [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1682.808530] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30d1b7b-a918-48d5-8ae1-3c05a9aa54f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.817187] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee16b42-8a3a-459c-9b4d-d8ef0f3df5a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.838654] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Successfully updated port: 5b882b83-1f22-4eb3-845f-766cac71d2de {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1682.868596] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781743, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.889986] env[62525]: DEBUG nova.compute.manager [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Received event network-changed-a9ec3613-8b89-413b-831b-896e679be20d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1682.890291] env[62525]: DEBUG nova.compute.manager [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Refreshing instance network info cache due to event network-changed-a9ec3613-8b89-413b-831b-896e679be20d. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1682.890545] env[62525]: DEBUG oslo_concurrency.lockutils [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] Acquiring lock "refresh_cache-73156235-1b13-4fda-8957-ed8cd88ceb43" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.890944] env[62525]: DEBUG oslo_concurrency.lockutils [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] Acquired lock "refresh_cache-73156235-1b13-4fda-8957-ed8cd88ceb43" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.891182] env[62525]: DEBUG nova.network.neutron [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Refreshing network info cache for port a9ec3613-8b89-413b-831b-896e679be20d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1682.956815] env[62525]: DEBUG oslo_concurrency.lockutils [None req-5f7a0688-13a1-44e4-8cfb-4996c9e204f5 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.303s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.059050] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781745, 'name': Rename_Task, 'duration_secs': 0.379666} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.059050] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1683.059050] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6aaaa70-2c58-4c50-8893-08b90f1408c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.065400] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1683.065400] env[62525]: value = "task-1781747" [ 1683.065400] env[62525]: _type = "Task" [ 1683.065400] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.076964] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781747, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.145992] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.145992] env[62525]: DEBUG nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1683.153336] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.680s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.155890] env[62525]: INFO nova.compute.claims [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1683.267122] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781746, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.276489] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781742, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.341959] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.342144] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.342303] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1683.367897] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781743, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.585820] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781747, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.668028] env[62525]: DEBUG nova.compute.utils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1683.677070] env[62525]: DEBUG nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1683.677260] env[62525]: DEBUG nova.network.neutron [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1683.769895] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781746, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.778322] env[62525]: DEBUG nova.policy [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36a7f35bf96d42b4a42e1cf71a15accb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3263280a4a14e87ac174d07c5dcb443', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1683.788694] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781742, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.800044] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "3b1a825f-b6a5-4822-86a5-57972f34748c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.800402] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.857810] env[62525]: DEBUG nova.network.neutron [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Updated VIF entry in instance network info cache for port a9ec3613-8b89-413b-831b-896e679be20d. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1683.858225] env[62525]: DEBUG nova.network.neutron [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Updating instance_info_cache with network_info: [{"id": "a9ec3613-8b89-413b-831b-896e679be20d", "address": "fa:16:3e:72:8b:20", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9ec3613-8b", "ovs_interfaceid": "a9ec3613-8b89-413b-831b-896e679be20d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.875282] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781743, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.332046} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.876643] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1683.877521] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1683.878775] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64f39ec6-67df-4069-8ed7-3b56db577046 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.888045] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1683.888045] env[62525]: value = "task-1781748" [ 1683.888045] env[62525]: _type = "Task" [ 1683.888045] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.898604] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.944550] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1684.020709] env[62525]: DEBUG nova.network.neutron [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Successfully updated port: 0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1684.085671] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781747, 'name': PowerOnVM_Task} progress is 76%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.178030] env[62525]: DEBUG nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1684.272866] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781746, 'name': CreateVM_Task, 'duration_secs': 1.311967} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.275827] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1684.276537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.276715] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.277051] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1684.277643] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7482901-e3f8-4348-b18f-00d015e6dfcb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.282528] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781742, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.644991} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.285146] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/02017241-feb5-466c-b885-b516496197f8/02017241-feb5-466c-b885-b516496197f8.vmdk to [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1684.286567] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9818c97d-0e62-480e-8dfb-f4da63b2d31b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.290153] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1684.290153] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a13d53-c479-9771-cbb3-160ef3dcd048" [ 1684.290153] env[62525]: _type = "Task" [ 1684.290153] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.303983] env[62525]: DEBUG nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1684.318260] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1684.322908] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-6c93e506-f746-4d2e-922a-f389df5494a8" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.323181] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-6c93e506-f746-4d2e-922a-f389df5494a8" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.323531] env[62525]: DEBUG nova.objects.instance [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'flavor' on Instance uuid c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.326051] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d38c05af-7310-4512-95a8-825ed3659c1d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.345193] env[62525]: DEBUG nova.network.neutron [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Successfully created port: d94efd19-31bc-4597-9716-352a3f25ecbf {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1684.348522] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a13d53-c479-9771-cbb3-160ef3dcd048, 'name': SearchDatastore_Task, 'duration_secs': 0.010588} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.349167] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.349427] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1684.349645] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.349789] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.350083] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1684.350680] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6916f00-a62c-4f58-b8e0-734b3860338e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.356990] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1684.356990] env[62525]: value = "task-1781749" [ 1684.356990] env[62525]: _type = "Task" [ 1684.356990] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.363449] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1684.363633] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1684.367092] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ba9b9f7-2c6a-41e5-9829-52e334aae3d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.369412] env[62525]: DEBUG oslo_concurrency.lockutils [req-97aa66f6-7e8f-43d9-9a20-df79109a83d5 req-5b46632b-859d-4b3c-af3e-5f85a4bccfdb service nova] Releasing lock "refresh_cache-73156235-1b13-4fda-8957-ed8cd88ceb43" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.369817] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781749, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.375734] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1684.375734] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527a7254-6331-76d7-4d70-994b5f86818c" [ 1684.375734] env[62525]: _type = "Task" [ 1684.375734] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.385340] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527a7254-6331-76d7-4d70-994b5f86818c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.396460] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080273} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.398768] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1684.399690] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07916beb-c9e8-4da2-8342-aa838158db32 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.423401] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1684.426304] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c8ef27b-61a3-4ef9-b292-51ad8836d8c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.447484] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1684.447484] env[62525]: value = "task-1781750" [ 1684.447484] env[62525]: _type = "Task" [ 1684.447484] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.455654] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781750, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.507053] env[62525]: DEBUG nova.network.neutron [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Updating instance_info_cache with network_info: [{"id": "2ac247b9-f66a-46ac-9bea-2b9c1870ba66", "address": "fa:16:3e:46:08:83", "network": {"id": "3ed1950d-1a41-456b-9320-7a1191fa12cf", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1734441925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac247b9-f6", "ovs_interfaceid": "2ac247b9-f66a-46ac-9bea-2b9c1870ba66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5b882b83-1f22-4eb3-845f-766cac71d2de", "address": "fa:16:3e:8d:b0:d4", "network": {"id": "9ef45080-ec0c-4507-9b95-b8f62fe7d446", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-258897051", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b882b83-1f", "ovs_interfaceid": "5b882b83-1f22-4eb3-845f-766cac71d2de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.527142] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.527279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.527427] env[62525]: DEBUG nova.network.neutron [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1684.587379] env[62525]: DEBUG oslo_vmware.api [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781747, 'name': PowerOnVM_Task, 'duration_secs': 1.07973} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.587654] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1684.587923] env[62525]: INFO nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Took 9.09 seconds to spawn the instance on the hypervisor. [ 1684.588118] env[62525]: DEBUG nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1684.588916] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db5e531-f245-49d8-a290-e1739394606e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.622020] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a224712-1ec0-4050-aedc-9ba5b68e3697 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.628352] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf987a3-11e4-4d33-b730-ad66881c638b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.661550] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577100c2-44c2-441f-b658-6f94de39adb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.670316] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39207ff2-20d7-4e4e-9037-f0782eb17072 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.685454] env[62525]: DEBUG nova.compute.provider_tree [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1684.832964] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.866484] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781749, 'name': ReconfigVM_Task, 'duration_secs': 0.301996} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.866916] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Reconfigured VM instance instance-00000048 to attach disk [datastore1] e8586018-100e-4729-97fc-98effa87cd9e/e8586018-100e-4729-97fc-98effa87cd9e.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1684.867424] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77ecddbb-f108-4050-ac41-a16235bb5bb5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.876780] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1684.876780] env[62525]: value = "task-1781751" [ 1684.876780] env[62525]: _type = "Task" [ 1684.876780] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.890688] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527a7254-6331-76d7-4d70-994b5f86818c, 'name': SearchDatastore_Task, 'duration_secs': 0.010978} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.895859] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781751, 'name': Rename_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.897693] env[62525]: DEBUG nova.compute.manager [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received event network-changed-5b882b83-1f22-4eb3-845f-766cac71d2de {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1684.897693] env[62525]: DEBUG nova.compute.manager [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Refreshing instance network info cache due to event network-changed-5b882b83-1f22-4eb3-845f-766cac71d2de. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1684.898126] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Acquiring lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.898126] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e679a021-3267-4292-9130-2fd27a1e28bd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.905130] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1684.905130] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524a65b4-1b23-fb07-4e5f-a9ef00d6b806" [ 1684.905130] env[62525]: _type = "Task" [ 1684.905130] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.913290] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524a65b4-1b23-fb07-4e5f-a9ef00d6b806, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.953221] env[62525]: DEBUG nova.objects.instance [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'pci_requests' on Instance uuid c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.961137] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781750, 'name': ReconfigVM_Task, 'duration_secs': 0.301033} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.962092] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5/2f713b35-9d07-4d25-a333-506fd2469bd5.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1684.962722] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dac4fab-fa85-407f-a39c-333d3844f78f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.969966] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1684.969966] env[62525]: value = "task-1781752" [ 1684.969966] env[62525]: _type = "Task" [ 1684.969966] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.979784] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781752, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.010593] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Releasing lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.010593] env[62525]: DEBUG nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Instance network_info: |[{"id": "2ac247b9-f66a-46ac-9bea-2b9c1870ba66", "address": "fa:16:3e:46:08:83", "network": {"id": "3ed1950d-1a41-456b-9320-7a1191fa12cf", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1734441925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac247b9-f6", "ovs_interfaceid": "2ac247b9-f66a-46ac-9bea-2b9c1870ba66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"5b882b83-1f22-4eb3-845f-766cac71d2de", "address": "fa:16:3e:8d:b0:d4", "network": {"id": "9ef45080-ec0c-4507-9b95-b8f62fe7d446", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-258897051", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b882b83-1f", "ovs_interfaceid": "5b882b83-1f22-4eb3-845f-766cac71d2de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1685.010593] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Acquired lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.010982] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Refreshing network info cache for port 5b882b83-1f22-4eb3-845f-766cac71d2de {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1685.012412] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:08:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f68ebd2a-3c68-48db-8c32-8a01497fc2e7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ac247b9-f66a-46ac-9bea-2b9c1870ba66', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:b0:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b882b83-1f22-4eb3-845f-766cac71d2de', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1685.023038] env[62525]: DEBUG oslo.service.loopingcall [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1685.025979] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1685.026528] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e0f2271-7e54-4761-9599-6dd56e6dfb2a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.052913] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1685.052913] env[62525]: value = "task-1781753" [ 1685.052913] env[62525]: _type = "Task" [ 1685.052913] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.061314] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781753, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.063211] env[62525]: DEBUG nova.compute.manager [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Received event network-vif-plugged-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1685.063471] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.063700] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.063868] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.064068] env[62525]: DEBUG nova.compute.manager [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] No waiting events found dispatching network-vif-plugged-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1685.064231] env[62525]: WARNING nova.compute.manager [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Received unexpected event network-vif-plugged-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 for instance with vm_state building and task_state spawning. 
[ 1685.064401] env[62525]: DEBUG nova.compute.manager [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Received event network-changed-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1685.064585] env[62525]: DEBUG nova.compute.manager [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Refreshing instance network info cache due to event network-changed-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1685.064774] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] Acquiring lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.093587] env[62525]: DEBUG nova.network.neutron [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1685.097276] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.099798] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.110231] env[62525]: INFO nova.compute.manager [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Took 23.32 seconds to build instance. 
[ 1685.191236] env[62525]: DEBUG nova.scheduler.client.report [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1685.199021] env[62525]: DEBUG nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1685.224104] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1685.224566] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1685.225030] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1685.225133] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1685.225273] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1685.225441] env[62525]: DEBUG nova.virt.hardware [None 
req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1685.225663] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1685.225840] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1685.226327] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1685.226567] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1685.226861] env[62525]: DEBUG nova.virt.hardware [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1685.227916] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff40598b-25e7-4d23-8671-dafd3792ef90 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.236519] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c987d02-4419-4d37-b963-99e316349b8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.330756] env[62525]: DEBUG nova.network.neutron [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updating instance_info_cache with network_info: [{"id": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "address": "fa:16:3e:7b:40:66", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ad012c1-5e", "ovs_interfaceid": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.386851] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Updated VIF entry in instance network info cache for port 5b882b83-1f22-4eb3-845f-766cac71d2de. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1685.387273] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Updating instance_info_cache with network_info: [{"id": "2ac247b9-f66a-46ac-9bea-2b9c1870ba66", "address": "fa:16:3e:46:08:83", "network": {"id": "3ed1950d-1a41-456b-9320-7a1191fa12cf", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1734441925", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f68ebd2a-3c68-48db-8c32-8a01497fc2e7", "external-id": "nsx-vlan-transportzone-49", "segmentation_id": 49, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac247b9-f6", "ovs_interfaceid": "2ac247b9-f66a-46ac-9bea-2b9c1870ba66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5b882b83-1f22-4eb3-845f-766cac71d2de", "address": "fa:16:3e:8d:b0:d4", "network": {"id": "9ef45080-ec0c-4507-9b95-b8f62fe7d446", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-258897051", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.38", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "aeeaeb287b194ebfb0c57e33ef138187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b882b83-1f", "ovs_interfaceid": "5b882b83-1f22-4eb3-845f-766cac71d2de", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.394329] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781751, 'name': Rename_Task, 'duration_secs': 0.141033} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.394620] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1685.394900] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a016d0fb-ca02-49ad-bc4b-d8727c372ebc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.401538] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1685.401538] env[62525]: value = "task-1781754" [ 1685.401538] env[62525]: _type = "Task" [ 1685.401538] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.412084] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.417762] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524a65b4-1b23-fb07-4e5f-a9ef00d6b806, 'name': SearchDatastore_Task, 'duration_secs': 0.010006} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.418009] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.418269] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 73156235-1b13-4fda-8957-ed8cd88ceb43/73156235-1b13-4fda-8957-ed8cd88ceb43.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1685.418514] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30e96378-126e-4a8c-9cb7-877a2b104d0c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.425456] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1685.425456] env[62525]: value = "task-1781755" [ 1685.425456] env[62525]: _type = "Task" [ 1685.425456] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.433201] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.456708] env[62525]: DEBUG nova.objects.base [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1685.456970] env[62525]: DEBUG nova.network.neutron [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1685.479732] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781752, 'name': Rename_Task, 'duration_secs': 0.151571} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.480013] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1685.480273] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a86e515b-2701-46fc-84a5-0cc4bd0d5ab6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.486652] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1685.486652] env[62525]: value = "task-1781756" [ 1685.486652] env[62525]: _type = "Task" [ 1685.486652] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.495235] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.538291] env[62525]: DEBUG nova.policy [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1685.564540] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781753, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.610465] env[62525]: DEBUG nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1685.615029] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a03a17cb-1c01-424a-b33c-f9848057720a tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.832s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.698423] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.698971] env[62525]: DEBUG nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1685.702251] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.872s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.837569] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Releasing lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.838025] env[62525]: DEBUG nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Instance network_info: |[{"id": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "address": "fa:16:3e:7b:40:66", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ad012c1-5e", "ovs_interfaceid": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1685.840424] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] Acquired lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.840424] env[62525]: DEBUG nova.network.neutron [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Refreshing network info cache for port 0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1685.840424] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:40:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1685.850088] env[62525]: DEBUG oslo.service.loopingcall [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1685.850553] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1685.854143] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39650aad-cbe6-4e32-933a-5531a96abf0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.876760] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1685.876760] env[62525]: value = "task-1781757" [ 1685.876760] env[62525]: _type = "Task" [ 1685.876760] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.888468] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781757, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.893177] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Releasing lock "refresh_cache-5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.893177] env[62525]: DEBUG nova.compute.manager [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Received event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1685.893177] env[62525]: DEBUG nova.compute.manager [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing instance network info cache due to event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1685.893177] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Acquiring lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.893177] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Acquired lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.893177] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1685.917680] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781754, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.937119] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781755, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.997801] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781756, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.066859] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781753, 'name': CreateVM_Task, 'duration_secs': 0.57859} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.066859] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1686.066859] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.066859] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.066859] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1686.066859] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90510c03-b64b-450f-8089-e69d765c3035 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.072019] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1686.072019] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e3e30e-c451-927e-259b-40164ef9d475" [ 1686.072019] env[62525]: _type = "Task" [ 1686.072019] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.078853] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e3e30e-c451-927e-259b-40164ef9d475, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.140023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.165110] env[62525]: DEBUG nova.network.neutron [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Successfully updated port: d94efd19-31bc-4597-9716-352a3f25ecbf {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1686.205720] env[62525]: DEBUG nova.compute.utils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1686.207628] env[62525]: DEBUG nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1686.207906] env[62525]: DEBUG nova.network.neutron [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1686.257350] env[62525]: DEBUG nova.policy [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98769d9ddf744118910ce61bcf47f145', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c87f1997d5c4739850790da5dd969fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1686.387735] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781757, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.419381] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781754, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.438259] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581135} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.438491] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 73156235-1b13-4fda-8957-ed8cd88ceb43/73156235-1b13-4fda-8957-ed8cd88ceb43.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1686.439051] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1686.439051] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2161185-4dd5-4923-a570-c934e103ceae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.445237] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1686.445237] env[62525]: value = "task-1781758" [ 1686.445237] env[62525]: _type = "Task" [ 1686.445237] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.456061] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781758, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.498364] env[62525]: DEBUG oslo_vmware.api [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781756, 'name': PowerOnVM_Task, 'duration_secs': 0.754666} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.498364] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1686.498364] env[62525]: DEBUG nova.compute.manager [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1686.498590] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e582de5b-d895-4bbe-ba39-5f4d282216cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.583758] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e3e30e-c451-927e-259b-40164ef9d475, 'name': SearchDatastore_Task, 'duration_secs': 0.012361} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.587616] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.587887] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1686.588233] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.588394] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.588580] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} 
[ 1686.589085] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a8ce36f-3dfb-46eb-b23a-f06e956b90c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.616780] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1686.617039] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1686.620244] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a58d33ff-c8de-494a-993c-9e0d59f83274 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.626406] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1686.626406] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528cafa8-e056-16d7-d66b-6f6d17cdcfa4" [ 1686.626406] env[62525]: _type = "Task" [ 1686.626406] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.635573] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528cafa8-e056-16d7-d66b-6f6d17cdcfa4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.667452] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "refresh_cache-c75091c3-45d2-4c71-b2ad-d38e8a449624" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.667696] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired lock "refresh_cache-c75091c3-45d2-4c71-b2ad-d38e8a449624" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.667836] env[62525]: DEBUG nova.network.neutron [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1686.673420] env[62525]: DEBUG nova.network.neutron [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Successfully created port: 713f3c8d-70c6-4226-9362-57f90126f716 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1686.683736] env[62525]: DEBUG nova.network.neutron [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updated VIF entry in instance network info cache for port 0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1686.684092] env[62525]: DEBUG nova.network.neutron [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updating instance_info_cache with network_info: [{"id": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "address": "fa:16:3e:7b:40:66", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ad012c1-5e", "ovs_interfaceid": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.711660] env[62525]: DEBUG nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1686.722922] env[62525]: INFO nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating resource usage from migration 52a5775e-aa5d-4847-8a57-847f128e703a [ 1686.739967] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.740193] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6e9051e9-aa89-408f-8f62-533085dc1312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.740521] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2f589dc1-9244-475f-86d0-4b69b511508b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.740727] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 94560d78-071c-419d-ad10-f42a5b2271a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.740823] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0067de08-6708-4c7c-a83a-ed9df193d5cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.740967] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.741101] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.741263] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2f713b35-9d07-4d25-a333-506fd2469bd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.741399] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 82443424-6071-44b3-bd9a-f92a1a650f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.741618] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cb043ab8-dff7-48c6-b50b-a4d77a01eb41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.741712] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.741767] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 80fbfbda-07fb-43ab-be74-3cbdaf890a55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.741916] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e8586018-100e-4729-97fc-98effa87cd9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.742056] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.742200] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 73156235-1b13-4fda-8957-ed8cd88ceb43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.742335] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.742449] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c75091c3-45d2-4c71-b2ad-d38e8a449624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.742595] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 4278fbb1-d2bd-4e92-aaca-260d40aa26b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1686.772646] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updated VIF entry in instance network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1686.773057] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.894837] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781757, 'name': CreateVM_Task, 'duration_secs': 0.568358} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.894837] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1686.895660] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.895962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.896583] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1686.896652] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ee62791-4dd5-4f85-8fde-a8c110b0398e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.903285] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1686.903285] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c53071-87fb-59f9-4b06-da8856af357c" [ 1686.903285] env[62525]: _type = "Task" [ 1686.903285] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.925764] env[62525]: DEBUG oslo_vmware.api [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781754, 'name': PowerOnVM_Task, 'duration_secs': 1.019682} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.926019] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c53071-87fb-59f9-4b06-da8856af357c, 'name': SearchDatastore_Task, 'duration_secs': 0.012652} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.928562] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1686.930505] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.930713] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1686.930924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.931677] env[62525]: DEBUG nova.compute.manager [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1686.966193] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071965} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.967089] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1686.969041] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4b6b84-7857-47f1-ba6e-56be1804830a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.994735] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 73156235-1b13-4fda-8957-ed8cd88ceb43/73156235-1b13-4fda-8957-ed8cd88ceb43.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1686.995079] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f687ab21-01c0-4131-8104-6ef87b45115d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.017770] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.020476] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1687.020476] env[62525]: value = "task-1781759" [ 1687.020476] env[62525]: _type = "Task" [ 1687.020476] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.030131] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781759, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.044688] env[62525]: DEBUG nova.compute.manager [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1687.046010] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d566dda-4e27-46c3-8d92-6547d6f0d373 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.138447] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528cafa8-e056-16d7-d66b-6f6d17cdcfa4, 'name': SearchDatastore_Task, 'duration_secs': 0.017677} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.139359] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6672a6fa-3857-4c01-8d67-c7093469cf79 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.146511] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1687.146511] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c1c170-c20d-af45-3fd8-591b393bc0d1" [ 1687.146511] env[62525]: _type = "Task" [ 1687.146511] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.155303] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c1c170-c20d-af45-3fd8-591b393bc0d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.188438] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2860b54-2841-4b5c-84a9-d781ad7d228a req-a606452a-2042-4e22-9587-d69a6b899281 service nova] Releasing lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.207505] env[62525]: DEBUG nova.network.neutron [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1687.244333] env[62525]: DEBUG nova.network.neutron [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Successfully updated port: 6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.246106] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 3b1a825f-b6a5-4822-86a5-57972f34748c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1687.275912] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Releasing lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.276232] env[62525]: DEBUG nova.compute.manager [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Received event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1687.276414] env[62525]: DEBUG nova.compute.manager [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing instance network info cache due to event network-changed-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1687.276673] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Acquiring lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.276853] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Acquired lock "refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.277056] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Refreshing network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1687.355268] env[62525]: DEBUG nova.compute.manager [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Received event network-vif-plugged-d94efd19-31bc-4597-9716-352a3f25ecbf {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1687.355268] env[62525]: DEBUG oslo_concurrency.lockutils [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] Acquiring lock "c75091c3-45d2-4c71-b2ad-d38e8a449624-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.355268] env[62525]: DEBUG oslo_concurrency.lockutils [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.355268] env[62525]: DEBUG oslo_concurrency.lockutils [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.355411] env[62525]: DEBUG nova.compute.manager [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] No waiting events found dispatching network-vif-plugged-d94efd19-31bc-4597-9716-352a3f25ecbf {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1687.355650] env[62525]: WARNING nova.compute.manager [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Received unexpected event network-vif-plugged-d94efd19-31bc-4597-9716-352a3f25ecbf for instance with vm_state building and task_state spawning. 
[ 1687.355820] env[62525]: DEBUG nova.compute.manager [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Received event network-changed-d94efd19-31bc-4597-9716-352a3f25ecbf {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1687.356039] env[62525]: DEBUG nova.compute.manager [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Refreshing instance network info cache due to event network-changed-d94efd19-31bc-4597-9716-352a3f25ecbf. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1687.356152] env[62525]: DEBUG oslo_concurrency.lockutils [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] Acquiring lock "refresh_cache-c75091c3-45d2-4c71-b2ad-d38e8a449624" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.391925] env[62525]: DEBUG nova.network.neutron [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Updating instance_info_cache with network_info: [{"id": "d94efd19-31bc-4597-9716-352a3f25ecbf", "address": "fa:16:3e:0e:fa:21", "network": {"id": "bb870dec-79cf-4481-89a8-329b92ae55af", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2073315772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3263280a4a14e87ac174d07c5dcb443", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "537e0890-4fa2-4f2d-b74c-49933a4edf53", "external-id": "nsx-vlan-transportzone-82", "segmentation_id": 82, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94efd19-31", "ovs_interfaceid": "d94efd19-31bc-4597-9716-352a3f25ecbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.455340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.530864] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781759, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.562685] env[62525]: DEBUG oslo_concurrency.lockutils [None req-39c06b8c-7e36-4476-b1ea-6ad05fc96a57 tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 30.731s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.657205] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c1c170-c20d-af45-3fd8-591b393bc0d1, 'name': SearchDatastore_Task, 'duration_secs': 0.019896} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.657435] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.657648] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6/5b1a2a46-df4d-41c6-a750-9ec3c75e57f6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1687.657924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.658129] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1687.658346] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23c5ffeb-21c3-43c0-af0c-47f63618ce59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.660444] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7d09043-98ad-487c-b2d4-018fe7f677a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.669264] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1687.669264] 
env[62525]: value = "task-1781760" [ 1687.669264] env[62525]: _type = "Task" [ 1687.669264] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.673564] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1687.673730] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1687.674804] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e885bac2-dede-431f-add4-684adaf104c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.680273] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781760, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.683449] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1687.683449] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526f7e47-cbec-904f-7a58-d14deb2aff2b" [ 1687.683449] env[62525]: _type = "Task" [ 1687.683449] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.691639] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526f7e47-cbec-904f-7a58-d14deb2aff2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.722676] env[62525]: DEBUG nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1687.747590] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1687.747878] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1687.748067] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1687.748281] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1687.748442] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1687.748629] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1687.748864] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1687.749057] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1687.749350] env[62525]: DEBUG nova.virt.hardware [None 
req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1687.749640] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1687.749935] env[62525]: DEBUG nova.virt.hardware [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1687.750901] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8abf0305-2000-4ffe-aa88-e2b355383ea3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1687.751068] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Migration 52a5775e-aa5d-4847-8a57-847f128e703a is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1687.751211] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 462bc19d-1eaa-4c57-8ebb-412a97614f03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1687.751481] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1687.751665] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4160MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1687.755350] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.755990] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.755990] env[62525]: DEBUG nova.network.neutron [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1687.757534] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e818ff-ee5d-4c1e-b9af-cdc7f0925879 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.767862] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3efe4b0-a3c7-4eb4-9a78-81deef401f00 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.894463] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Releasing lock "refresh_cache-c75091c3-45d2-4c71-b2ad-d38e8a449624" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.894812] env[62525]: DEBUG nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Instance network_info: |[{"id": "d94efd19-31bc-4597-9716-352a3f25ecbf", "address": "fa:16:3e:0e:fa:21", "network": {"id": "bb870dec-79cf-4481-89a8-329b92ae55af", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2073315772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3263280a4a14e87ac174d07c5dcb443", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "537e0890-4fa2-4f2d-b74c-49933a4edf53", "external-id": "nsx-vlan-transportzone-82", "segmentation_id": 82, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94efd19-31", "ovs_interfaceid": "d94efd19-31bc-4597-9716-352a3f25ecbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1687.895147] env[62525]: DEBUG oslo_concurrency.lockutils [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] Acquired lock "refresh_cache-c75091c3-45d2-4c71-b2ad-d38e8a449624" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.895380] env[62525]: DEBUG nova.network.neutron [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Refreshing network info cache for port d94efd19-31bc-4597-9716-352a3f25ecbf {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1687.896609] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:fa:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '537e0890-4fa2-4f2d-b74c-49933a4edf53', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd94efd19-31bc-4597-9716-352a3f25ecbf', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1687.908598] env[62525]: DEBUG oslo.service.loopingcall [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1687.916858] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1687.917633] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71cf1cd7-e3cd-4a15-9809-127dad78f9b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.942329] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1687.942329] env[62525]: value = "task-1781761" [ 1687.942329] env[62525]: _type = "Task" [ 1687.942329] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.953047] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781761, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.034762] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781759, 'name': ReconfigVM_Task, 'duration_secs': 0.833581} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.035144] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 73156235-1b13-4fda-8957-ed8cd88ceb43/73156235-1b13-4fda-8957-ed8cd88ceb43.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1688.035937] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6aab497c-0db6-48af-9318-afb4c37aff09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.045021] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1688.045021] env[62525]: value = "task-1781762" [ 1688.045021] env[62525]: _type = "Task" [ 1688.045021] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.056982] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781762, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.104947] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa6dcad-f093-44f9-b222-d19708b8e801 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.116772] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e76812d-7082-4bf0-a6dc-457440773d4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.156768] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530b7206-b677-43cb-9899-c9a1f9f8480d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.167739] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e70ef1-4633-4746-b738-1b7d85a5c9be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.186283] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1688.195575] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781760, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.202930] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526f7e47-cbec-904f-7a58-d14deb2aff2b, 'name': SearchDatastore_Task, 'duration_secs': 0.023952} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.206541] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b88eac94-620c-41c8-b8ac-9b54aacac0bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.213093] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1688.213093] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52295c7b-9c40-5aa5-d9eb-4d958a034e9a" [ 1688.213093] env[62525]: _type = "Task" [ 1688.213093] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.221741] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52295c7b-9c40-5aa5-d9eb-4d958a034e9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.226941] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updated VIF entry in instance network info cache for port 91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1688.227420] env[62525]: DEBUG nova.network.neutron [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [{"id": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "address": "fa:16:3e:fe:62:78", "network": {"id": "d845541d-f268-4bc1-a869-88894faaa2d7", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-570261349-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "099851107d594ed39cef954e6e6e87b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91b445d1-b4", "ovs_interfaceid": "91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.307511] env[62525]: WARNING nova.network.neutron [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] 58fc2de9-73a3-4f13-914c-ad34af02ccb5 already exists in list: networks containing: ['58fc2de9-73a3-4f13-914c-ad34af02ccb5']. ignoring it [ 1688.322728] env[62525]: DEBUG nova.network.neutron [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Updated VIF entry in instance network info cache for port d94efd19-31bc-4597-9716-352a3f25ecbf. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1688.323209] env[62525]: DEBUG nova.network.neutron [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Updating instance_info_cache with network_info: [{"id": "d94efd19-31bc-4597-9716-352a3f25ecbf", "address": "fa:16:3e:0e:fa:21", "network": {"id": "bb870dec-79cf-4481-89a8-329b92ae55af", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2073315772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3263280a4a14e87ac174d07c5dcb443", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "537e0890-4fa2-4f2d-b74c-49933a4edf53", "external-id": "nsx-vlan-transportzone-82", "segmentation_id": 82, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94efd19-31", "ovs_interfaceid": "d94efd19-31bc-4597-9716-352a3f25ecbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.375464] env[62525]: DEBUG nova.network.neutron [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Successfully updated port: 713f3c8d-70c6-4226-9362-57f90126f716 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1688.451710] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781761, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.557596] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781762, 'name': Rename_Task, 'duration_secs': 0.36368} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.559731] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1688.560013] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5dc1bb25-bec1-478c-a102-31dc12315f87 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.568651] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1688.568651] env[62525]: value = "task-1781763" [ 1688.568651] env[62525]: _type = "Task" [ 1688.568651] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.576227] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781763, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.683548] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781760, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599697} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.685902] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6/5b1a2a46-df4d-41c6-a750-9ec3c75e57f6.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1688.686102] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1688.686366] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53a3a266-d5a1-4769-b69c-6c00f46e61e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.695853] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1688.695853] env[62525]: value = "task-1781764" [ 1688.695853] env[62525]: _type = "Task" [ 1688.695853] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.704507] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.710010] env[62525]: ERROR nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [req-4f2be6fc-4fa4-4230-879c-3954030eeb19] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4f2be6fc-4fa4-4230-879c-3954030eeb19"}]} [ 1688.726381] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52295c7b-9c40-5aa5-d9eb-4d958a034e9a, 'name': SearchDatastore_Task, 'duration_secs': 0.040456} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.727353] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1688.729267] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.729559] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c96a9ef9-0ef7-41a2-bb0f-531f82980eb8/c96a9ef9-0ef7-41a2-bb0f-531f82980eb8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1688.730582] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd37a3ed-7b52-40a1-994e-95e6e56a8452 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.736874] env[62525]: DEBUG oslo_concurrency.lockutils [req-f2e9fb05-46db-4f56-ac7c-5eaa6c6e0a23 req-53c186f7-6e18-47cf-822b-8d1862e2db46 service nova] Releasing lock 
"refresh_cache-0067de08-6708-4c7c-a83a-ed9df193d5cd" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.738603] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1688.738603] env[62525]: value = "task-1781765" [ 1688.738603] env[62525]: _type = "Task" [ 1688.738603] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.739625] env[62525]: DEBUG nova.network.neutron [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6c93e506-f746-4d2e-922a-f389df5494a8", "address": "fa:16:3e:8f:6e:f1", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c93e506-f7", "ovs_interfaceid": "6c93e506-f746-4d2e-922a-f389df5494a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.746656] env[62525]: DEBUG nova.scheduler.client.report [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1688.746873] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1688.752126] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781765, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.762208] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1688.780600] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1688.825784] env[62525]: DEBUG oslo_concurrency.lockutils [req-a21adee6-5f64-4109-91e8-ad3c66bb9586 req-eafd6ce9-0889-4827-ad6f-e4cebb939516 service nova] Releasing lock "refresh_cache-c75091c3-45d2-4c71-b2ad-d38e8a449624" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.879972] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-4278fbb1-d2bd-4e92-aaca-260d40aa26b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.879972] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-4278fbb1-d2bd-4e92-aaca-260d40aa26b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.879972] env[62525]: DEBUG nova.network.neutron [None 
req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1688.954943] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781761, 'name': CreateVM_Task, 'duration_secs': 0.850727} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.955317] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1688.955863] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.956045] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.956381] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1688.957102] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cf1eef6-2772-4a2e-acd2-7d7e7f6af3bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.962424] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1688.962424] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fa73ed-b5b7-caf1-b26b-7c56a3722ad4" [ 1688.962424] env[62525]: _type = "Task" [ 1688.962424] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.973878] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fa73ed-b5b7-caf1-b26b-7c56a3722ad4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.079748] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781763, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.081906] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c608f90-bda5-4724-abd6-98670ae9862f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.091214] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b892f91a-0646-4f82-83d1-320f400eb461 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.123612] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f73fa5-7176-49fe-a440-e0fd790c04ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.132719] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6f0f08-c80b-4da2-9622-9febacdf25ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.149286] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1689.205789] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064458} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.206132] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1689.206953] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9056b912-2a69-49ba-9eef-24eb2b1dd891 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.232533] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6/5b1a2a46-df4d-41c6-a750-9ec3c75e57f6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1689.232897] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e75e3c03-7124-433a-8947-413b14f91d37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.247586] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.248268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.248430] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.252525] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd5e7ba-8e1e-436a-bd7e-a7569bd09ee7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.260611] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781765, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.273364] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1689.273612] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1689.273752] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1689.273933] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1689.274092] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1689.274242] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1689.274443] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1689.274603] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1689.274771] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible 
topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1689.274933] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1689.275121] env[62525]: DEBUG nova.virt.hardware [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1689.281441] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Reconfiguring VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1689.281862] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1689.281862] env[62525]: value = "task-1781766" [ 1689.281862] env[62525]: _type = "Task" [ 1689.281862] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.282096] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6b06f36-dd15-44c7-92a4-cdf4dcd6d3a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.303458] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.304867] env[62525]: DEBUG oslo_vmware.api [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1689.304867] env[62525]: value = "task-1781767" [ 1689.304867] env[62525]: _type = "Task" [ 1689.304867] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.312539] env[62525]: DEBUG oslo_vmware.api [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781767, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.429170] env[62525]: DEBUG nova.network.neutron [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1689.475367] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fa73ed-b5b7-caf1-b26b-7c56a3722ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.0117} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.475707] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.475953] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1689.476303] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.476553] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.476858] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1689.477255] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99d8fdac-a048-4193-a628-177c387507f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.501572] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1689.501815] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1689.502655] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bbf7b21-23d7-43b4-a147-46493c42f1a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.508975] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1689.508975] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5223ec8a-5a3a-e78f-c28b-6ba9294930d2" [ 1689.508975] env[62525]: _type = "Task" [ 1689.508975] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.516779] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-vif-plugged-6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1689.517039] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.517322] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.517509] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.517680] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] No waiting events found dispatching network-vif-plugged-6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1689.517846] env[62525]: WARNING nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received unexpected event network-vif-plugged-6c93e506-f746-4d2e-922a-f389df5494a8 for instance with vm_state active and task_state None. 
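The records just above trace nova-compute's external-event path: neutron reports network-vif-plugged / network-changed for port 6c93e506-f746-4d2e-922a-f389df5494a8, the per-instance "<uuid>-events" lock is taken, and because no code path registered a waiter the plug event is logged as unexpected, while the change event falls back to a network-info-cache refresh. A minimal sketch of that waiter-registry pattern follows; the names are hypothetical stand-ins (Nova's real implementation lives in nova.compute.manager.InstanceEvents and uses its own synchronization primitives), so treat this as an illustration of the dispatch logic only.

    # Editorial sketch (hypothetical names): dispatch external instance events to
    # registered waiters, falling back to a cache refresh when nobody is waiting.
    import threading
    from collections import defaultdict

    class InstanceEventWaiters:
        def __init__(self):
            self._lock = threading.Lock()              # plays the role of the "<uuid>-events" lock
            self._waiters = defaultdict(dict)          # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            # Called by an operation that expects an event (e.g. attaching an interface).
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def dispatch(self, instance_uuid, event_name, refresh_cache=None):
            with self._lock:
                ev = self._waiters[instance_uuid].pop(event_name, None)
            if ev is not None:
                ev.set()                               # wake the code path waiting on the plug
            elif event_name.startswith("network-changed") and refresh_cache:
                refresh_cache(instance_uuid)           # no waiter: just refresh the info cache
            else:
                # corresponds to the "Received unexpected event ..." WARNING above
                print(f"unexpected event {event_name} for {instance_uuid}")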
[ 1689.518010] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-changed-6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1689.518171] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing instance network info cache due to event network-changed-6c93e506-f746-4d2e-922a-f389df5494a8. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1689.518350] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.518481] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.518647] env[62525]: DEBUG nova.network.neutron [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing network info cache for port 6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1689.531643] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5223ec8a-5a3a-e78f-c28b-6ba9294930d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.579628] env[62525]: DEBUG oslo_vmware.api [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781763, 'name': PowerOnVM_Task, 'duration_secs': 0.831598} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.579920] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1689.580157] env[62525]: INFO nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Took 9.28 seconds to spawn the instance on the hypervisor. 
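Nearly every pair of "Task: {'id': task-..., ...} progress is N%" and "... completed successfully." records in this section comes from the same loop: the driver invokes a vSphere method that returns a task reference, then polls the task's info until it reaches success or error. The real loop lives in oslo_vmware.api, as the wait_for_task / _poll_task references above show; the helper below is an illustrative stand-in with assumed names, not that library's API, where get_task_info is a caller-supplied callable standing in for a PropertyCollector read of the task's "info" property.

    # Editorial sketch: poll a vSphere task reference until it finishes.
    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        while True:
            info = get_task_info(task_ref)        # e.g. {'state': ..., 'progress': ..., 'error': ...}
            state = info["state"]
            if state == "success":
                return info                       # -> "... completed successfully."
            if state == "error":
                raise RuntimeError(info.get("error", "task failed"))
            # queued / running: report progress and try again
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)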
[ 1689.580348] env[62525]: DEBUG nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1689.581154] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e24f7c-5fd0-48da-9476-e8238f7c0e87 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.644863] env[62525]: DEBUG nova.network.neutron [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Updating instance_info_cache with network_info: [{"id": "713f3c8d-70c6-4226-9362-57f90126f716", "address": "fa:16:3e:b5:94:94", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap713f3c8d-70", "ovs_interfaceid": "713f3c8d-70c6-4226-9362-57f90126f716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.685021] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1689.685021] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 108 to 109 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1689.685021] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1689.757593] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781765, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552804} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.757941] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c96a9ef9-0ef7-41a2-bb0f-531f82980eb8/c96a9ef9-0ef7-41a2-bb0f-531f82980eb8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1689.758184] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1689.758486] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c3e35fa-5cf4-4466-837f-1f8ee887f810 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.765223] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1689.765223] env[62525]: value = "task-1781768" [ 1689.765223] env[62525]: _type = "Task" [ 1689.765223] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.774708] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781768, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.804885] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.813287] env[62525]: DEBUG oslo_vmware.api [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781767, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.018894] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5223ec8a-5a3a-e78f-c28b-6ba9294930d2, 'name': SearchDatastore_Task, 'duration_secs': 0.030425} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.020198] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae96aad6-41f3-47dc-af24-6d6bcf13f6e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.027983] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1690.027983] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52faa58f-a0a9-d54d-b757-c5021f83d15b" [ 1690.027983] env[62525]: _type = "Task" [ 1690.027983] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.035707] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52faa58f-a0a9-d54d-b757-c5021f83d15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.109026] env[62525]: INFO nova.compute.manager [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Took 22.07 seconds to build instance. 
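The Placement exchange earlier in this section (the 409 placement.concurrent_update error, followed by "Refreshing inventories ...", then "Updated inventory ... with generation 108" and the provider generation bump from 108 to 109) is the standard optimistic-concurrency pattern: every inventory PUT carries the provider generation the client last saw, and a conflict means another writer got there first, so the client re-reads and retries. A minimal sketch against the Placement REST API using requests follows; the endpoint paths match the published API, while the base URL and headers (auth token, OpenStack-API-Version) are assumed to be supplied by the caller.

    # Editorial sketch: retry an inventory PUT on a placement generation conflict.
    import requests

    def set_inventory(base_url, headers, rp_uuid, inventories, max_attempts=3):
        url = f"{base_url}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_attempts):
            # Read the provider's current generation before writing.
            gen = requests.get(url, headers=headers).json()["resource_provider_generation"]
            resp = requests.put(url, headers=headers, json={
                "resource_provider_generation": gen,   # optimistic-concurrency token
                "inventories": inventories,            # e.g. {'VCPU': {...}, 'MEMORY_MB': {...}, 'DISK_GB': {...}}
            })
            if resp.status_code == 200:
                return resp.json()                     # body carries the new generation
            if resp.status_code == 409:                # placement.concurrent_update: refresh and retry
                continue
            resp.raise_for_status()
        raise RuntimeError("gave up after repeated generation conflicts")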
[ 1690.148305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-4278fbb1-d2bd-4e92-aaca-260d40aa26b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.148612] env[62525]: DEBUG nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Instance network_info: |[{"id": "713f3c8d-70c6-4226-9362-57f90126f716", "address": "fa:16:3e:b5:94:94", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap713f3c8d-70", "ovs_interfaceid": "713f3c8d-70c6-4226-9362-57f90126f716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1690.149140] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:94:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '713f3c8d-70c6-4226-9362-57f90126f716', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1690.157315] env[62525]: DEBUG oslo.service.loopingcall [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1690.159978] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1690.160238] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-919cfb9b-9620-41e9-81e5-1ddb4fd83f84 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.180525] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1690.180525] env[62525]: value = "task-1781769" [ 1690.180525] env[62525]: _type = "Task" [ 1690.180525] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.188804] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1690.188999] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.487s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.189204] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781769, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.189445] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.357s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.190916] env[62525]: INFO nova.compute.claims [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1690.193320] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1690.197028] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1690.279195] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081514} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.281395] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1690.282733] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8111119-5d4a-43d3-aba5-2402f15e3ed8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.305070] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] c96a9ef9-0ef7-41a2-bb0f-531f82980eb8/c96a9ef9-0ef7-41a2-bb0f-531f82980eb8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1690.308619] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1f8a7c7-2b98-427d-b275-6a3402405aee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.332710] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781766, 'name': ReconfigVM_Task, 'duration_secs': 0.685272} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.336766] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6/5b1a2a46-df4d-41c6-a750-9ec3c75e57f6.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1690.337490] env[62525]: DEBUG oslo_vmware.api [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781767, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.337767] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1690.337767] env[62525]: value = "task-1781770" [ 1690.337767] env[62525]: _type = "Task" [ 1690.337767] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.337983] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebeab181-0b2d-43c5-ab43-16b6d06d2717 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.346367] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1690.346367] env[62525]: value = "task-1781771" [ 1690.346367] env[62525]: _type = "Task" [ 1690.346367] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.351016] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781770, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.357378] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781771, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.382770] env[62525]: DEBUG nova.network.neutron [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updated VIF entry in instance network info cache for port 6c93e506-f746-4d2e-922a-f389df5494a8. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1690.383213] env[62525]: DEBUG nova.network.neutron [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6c93e506-f746-4d2e-922a-f389df5494a8", "address": "fa:16:3e:8f:6e:f1", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c93e506-f7", "ovs_interfaceid": "6c93e506-f746-4d2e-922a-f389df5494a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.512301] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bebef5c-c283-4751-b75f-9333622244ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.520391] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5f5db1-452c-4de9-a618-b326dbb896cc tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Suspending the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1690.520659] env[62525]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3887f788-8f62-45e7-b548-451b805933ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.527026] env[62525]: DEBUG oslo_vmware.api [None req-9f5f5db1-452c-4de9-a618-b326dbb896cc tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1690.527026] env[62525]: value = "task-1781772" [ 1690.527026] env[62525]: _type = "Task" [ 1690.527026] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.540106] env[62525]: DEBUG oslo_vmware.api [None req-9f5f5db1-452c-4de9-a618-b326dbb896cc tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781772, 'name': SuspendVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.544796] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52faa58f-a0a9-d54d-b757-c5021f83d15b, 'name': SearchDatastore_Task, 'duration_secs': 0.029048} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.545154] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.545472] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c75091c3-45d2-4c71-b2ad-d38e8a449624/c75091c3-45d2-4c71-b2ad-d38e8a449624.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1690.545815] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6762c000-5c79-4b8f-8156-fb47a8add596 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.555275] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1690.555275] env[62525]: value = "task-1781773" [ 1690.555275] env[62525]: _type = "Task" [ 1690.555275] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.566704] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781773, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.611528] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f8d9c147-a097-4a92-8fcb-e461096a4e50 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.577s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.691230] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781769, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.709575] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] There are 44 instances to clean {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1690.709784] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 2ad723ff-6540-4bb4-b09e-52e6a9fb12b9] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1690.821201] env[62525]: DEBUG oslo_vmware.api [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781767, 'name': ReconfigVM_Task, 'duration_secs': 1.42317} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.821776] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.822014] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Reconfigured VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1690.854649] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.866636] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781771, 'name': Rename_Task, 'duration_secs': 0.230892} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.866915] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1690.867218] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f6c8f99-d8bf-46ab-ae1c-08eb3f0d1b5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.873929] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1690.873929] env[62525]: value = "task-1781774" [ 1690.873929] env[62525]: _type = "Task" [ 1690.873929] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.882621] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781774, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.886257] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.886507] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Received event network-vif-plugged-713f3c8d-70c6-4226-9362-57f90126f716 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1690.886721] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Acquiring lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.886924] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.887074] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.887244] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 
req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] No waiting events found dispatching network-vif-plugged-713f3c8d-70c6-4226-9362-57f90126f716 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1690.887409] env[62525]: WARNING nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Received unexpected event network-vif-plugged-713f3c8d-70c6-4226-9362-57f90126f716 for instance with vm_state building and task_state spawning. [ 1690.887570] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Received event network-changed-713f3c8d-70c6-4226-9362-57f90126f716 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1690.887805] env[62525]: DEBUG nova.compute.manager [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Refreshing instance network info cache due to event network-changed-713f3c8d-70c6-4226-9362-57f90126f716. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1690.888018] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Acquiring lock "refresh_cache-4278fbb1-d2bd-4e92-aaca-260d40aa26b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.888166] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Acquired lock "refresh_cache-4278fbb1-d2bd-4e92-aaca-260d40aa26b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.888323] env[62525]: DEBUG nova.network.neutron [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Refreshing network info cache for port 713f3c8d-70c6-4226-9362-57f90126f716 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1691.040145] env[62525]: DEBUG oslo_vmware.api [None req-9f5f5db1-452c-4de9-a618-b326dbb896cc tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781772, 'name': SuspendVM_Task} progress is 58%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.066254] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781773, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.195128] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781769, 'name': CreateVM_Task, 'duration_secs': 0.767825} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.195300] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1691.196195] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.196420] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.196842] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1691.197197] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc16874b-2b7d-4362-9c7a-3e33869996ad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.203381] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1691.203381] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5248936b-cdf0-5e7a-6bc7-d37fe20a7b95" [ 1691.203381] env[62525]: _type = "Task" [ 1691.203381] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.217448] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 3ef2dbbe-0cf3-4098-91d8-e206a872bd08] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1691.219449] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5248936b-cdf0-5e7a-6bc7-d37fe20a7b95, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.328028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7e278bb7-2122-422d-a593-3404ac496aa3 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-6c93e506-f746-4d2e-922a-f389df5494a8" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.005s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.352313] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781770, 'name': ReconfigVM_Task, 'duration_secs': 0.618716} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.352616] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfigured VM instance instance-00000057 to attach disk [datastore1] c96a9ef9-0ef7-41a2-bb0f-531f82980eb8/c96a9ef9-0ef7-41a2-bb0f-531f82980eb8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1691.353477] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47c989ee-bb21-4291-bbb6-fe415e3c50d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.360199] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1691.360199] env[62525]: value = "task-1781775" [ 1691.360199] env[62525]: _type = "Task" [ 1691.360199] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.369502] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781775, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.386242] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781774, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.499342] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "73156235-1b13-4fda-8957-ed8cd88ceb43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.499670] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.499974] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "73156235-1b13-4fda-8957-ed8cd88ceb43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.500248] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.500499] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.502839] env[62525]: INFO nova.compute.manager [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Terminating instance [ 1691.504744] env[62525]: DEBUG nova.compute.manager [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1691.505039] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1691.506263] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f327f1a0-6bf4-415e-8925-9780bf99c20b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.524419] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1691.524419] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a7dc2e6-272a-4cff-94ad-4dca9a13a237 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.531022] env[62525]: DEBUG oslo_vmware.api [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1691.531022] env[62525]: value = "task-1781776" [ 1691.531022] env[62525]: _type = "Task" [ 1691.531022] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.545786] env[62525]: DEBUG oslo_vmware.api [None req-9f5f5db1-452c-4de9-a618-b326dbb896cc tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781772, 'name': SuspendVM_Task, 'duration_secs': 0.893121} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.549717] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5f5db1-452c-4de9-a618-b326dbb896cc tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Suspended the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1691.549925] env[62525]: DEBUG nova.compute.manager [None req-9f5f5db1-452c-4de9-a618-b326dbb896cc tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1691.550290] env[62525]: DEBUG oslo_vmware.api [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781776, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.552233] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5b7d52-6576-4fa0-9eb4-68a69eb4398f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.556175] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144b05ea-f073-4500-8071-7e44caa464dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.575792] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f81aae-8c0a-430e-9697-df322d0ad89d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.586911] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781773, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.760752} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.587738] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c75091c3-45d2-4c71-b2ad-d38e8a449624/c75091c3-45d2-4c71-b2ad-d38e8a449624.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1691.588058] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1691.588381] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3dd6907-9799-4d9f-9ba4-53019dc20a5c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.636778] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205aaa0c-d0bd-4d73-bbd4-f71847953e12 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.643168] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1691.643168] env[62525]: value = "task-1781777" [ 1691.643168] env[62525]: _type = "Task" [ 1691.643168] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.654775] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4b04f2-e836-4b92-8452-8976521271f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.666506] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.681165] env[62525]: DEBUG nova.compute.provider_tree [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1691.716277] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5248936b-cdf0-5e7a-6bc7-d37fe20a7b95, 'name': SearchDatastore_Task, 'duration_secs': 0.049144} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.716586] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.716900] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1691.717129] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.717279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.717458] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1691.717800] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d168c15-f731-4415-b414-ed19e2b7d12a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.721194] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 4e52e21e-4db3-45e5-b88d-455d1b8ea5c8] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1691.727941] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1691.728094] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1691.728823] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aee00e26-8001-4170-be50-9f88c591deaa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.734853] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1691.734853] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521a9ba9-5196-0132-7699-e0f8ecd570d9" [ 1691.734853] env[62525]: _type = "Task" [ 1691.734853] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.743938] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a9ba9-5196-0132-7699-e0f8ecd570d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.754108] env[62525]: DEBUG nova.network.neutron [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Updated VIF entry in instance network info cache for port 713f3c8d-70c6-4226-9362-57f90126f716. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1691.754465] env[62525]: DEBUG nova.network.neutron [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Updating instance_info_cache with network_info: [{"id": "713f3c8d-70c6-4226-9362-57f90126f716", "address": "fa:16:3e:b5:94:94", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap713f3c8d-70", "ovs_interfaceid": "713f3c8d-70c6-4226-9362-57f90126f716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.873476] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781775, 'name': Rename_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.889495] env[62525]: DEBUG oslo_vmware.api [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781774, 'name': PowerOnVM_Task, 'duration_secs': 0.972736} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.889790] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1691.890024] env[62525]: INFO nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Took 14.01 seconds to spawn the instance on the hypervisor. 
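The waited/held figures in the oslo_concurrency.lockutils entries (for example "compute_resources" acquired after waiting 5.357s and released after being held 4.487s) come from timing both the acquire of a named lock and the critical section it protects. A minimal sketch of that bookkeeping, assuming nothing about lockutils internals; timed_lock and its lock registry are hypothetical stand-ins, not the lockutils implementation.

    # Illustrative only: reproduces the 'acquired :: waited Ns' /
    # '"released" :: held Ns' bookkeeping seen in the lockutils lines.
    import contextlib
    import threading
    import time

    _locks = {}
    _registry_guard = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, owner):
        # One shared lock object per lock name, as with named lockutils locks.
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)  # critical section being timed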
[ 1691.890215] env[62525]: DEBUG nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1691.891063] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e88cb0e-4817-460f-b771-f7b28a90b025 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.043271] env[62525]: DEBUG oslo_vmware.api [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781776, 'name': PowerOffVM_Task, 'duration_secs': 0.489252} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.043777] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1692.043777] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1692.043935] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8c71fb2-915c-47ed-aa53-dd009df18f27 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.130377] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1692.130594] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1692.130785] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Deleting the datastore file [datastore1] 73156235-1b13-4fda-8957-ed8cd88ceb43 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1692.131221] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e22158c-6c91-46a6-80ec-d1f12509c611 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.138231] env[62525]: DEBUG oslo_vmware.api [None req-58f1ea77-cc69-46bf-8530-b31b28424784 
tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for the task: (returnval){ [ 1692.138231] env[62525]: value = "task-1781779" [ 1692.138231] env[62525]: _type = "Task" [ 1692.138231] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.145934] env[62525]: DEBUG oslo_vmware.api [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781779, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.152985] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111685} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.153258] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1692.154028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6ae1f7-c894-4f69-bb66-40b0fe8bd403 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.176072] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] c75091c3-45d2-4c71-b2ad-d38e8a449624/c75091c3-45d2-4c71-b2ad-d38e8a449624.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1692.176388] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a9c8947-b577-40d3-8354-e2e83a1ed54d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.193122] env[62525]: DEBUG nova.scheduler.client.report [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1692.203875] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 
tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1692.203875] env[62525]: value = "task-1781780" [ 1692.203875] env[62525]: _type = "Task" [ 1692.203875] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.212442] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781780, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.224092] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6ddbffdc-c7ec-442b-99db-3f44fbbd1ab7] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1692.244681] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a9ba9-5196-0132-7699-e0f8ecd570d9, 'name': SearchDatastore_Task, 'duration_secs': 0.013925} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.245440] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4812512e-fed7-476c-ae3d-48b0a8adad04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.251170] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1692.251170] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f997b9-75b7-b800-34f1-0efb419a5283" [ 1692.251170] env[62525]: _type = "Task" [ 1692.251170] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.257173] env[62525]: DEBUG oslo_concurrency.lockutils [req-3bce88b7-9166-42ff-b51c-a50d7d5eccb4 req-139c1d2f-56a9-4154-863f-5ff71cdb845d service nova] Releasing lock "refresh_cache-4278fbb1-d2bd-4e92-aaca-260d40aa26b1" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.262223] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f997b9-75b7-b800-34f1-0efb419a5283, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.371795] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781775, 'name': Rename_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.407972] env[62525]: INFO nova.compute.manager [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Took 26.12 seconds to build instance. [ 1692.651082] env[62525]: DEBUG oslo_vmware.api [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Task: {'id': task-1781779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.40735} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.651082] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1692.651082] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1692.651082] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1692.651082] env[62525]: INFO nova.compute.manager [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1692.651082] env[62525]: DEBUG oslo.service.loopingcall [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1692.651082] env[62525]: DEBUG nova.compute.manager [-] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1692.651082] env[62525]: DEBUG nova.network.neutron [-] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1692.702166] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.702747] env[62525]: DEBUG nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1692.707153] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.567s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.708741] env[62525]: INFO nova.compute.claims [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1692.721503] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781780, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.729155] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 80cb1874-2fc8-41ef-b1af-da308f32a2b0] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1692.764174] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f997b9-75b7-b800-34f1-0efb419a5283, 'name': SearchDatastore_Task, 'duration_secs': 0.038} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.764174] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.764392] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 4278fbb1-d2bd-4e92-aaca-260d40aa26b1/4278fbb1-d2bd-4e92-aaca-260d40aa26b1.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1692.768018] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1fc897e1-a104-487b-a865-39df87a27169 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.773060] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1692.773060] env[62525]: value = "task-1781781" [ 1692.773060] env[62525]: _type = "Task" [ 1692.773060] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.781920] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.872513] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781775, 'name': Rename_Task, 'duration_secs': 1.231094} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.872797] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1692.873571] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e67e1ec-e47e-4ca0-86c9-9e01129a7bdb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.880505] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1692.880505] env[62525]: value = "task-1781782" [ 1692.880505] env[62525]: _type = "Task" [ 1692.880505] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.891646] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781782, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.912886] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c58ddf09-40ef-4bd8-82c2-d1aae9c67540 tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.639s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.048862] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-6c93e506-f746-4d2e-922a-f389df5494a8" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.049359] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-6c93e506-f746-4d2e-922a-f389df5494a8" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.134106] env[62525]: INFO nova.compute.manager [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Resuming [ 1693.134817] env[62525]: DEBUG nova.objects.instance [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lazy-loading 'flavor' on Instance uuid e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.202723] env[62525]: DEBUG nova.compute.manager [req-6368c3f1-da79-4b84-ac51-b69a8bcf79a7 req-533044bd-aef9-4c10-a075-5e79351e3448 service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Received event network-vif-deleted-a9ec3613-8b89-413b-831b-896e679be20d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1693.202941] env[62525]: INFO nova.compute.manager [req-6368c3f1-da79-4b84-ac51-b69a8bcf79a7 req-533044bd-aef9-4c10-a075-5e79351e3448 service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Neutron deleted interface a9ec3613-8b89-413b-831b-896e679be20d; detaching it from the instance and deleting it from the info cache [ 1693.203151] env[62525]: DEBUG nova.network.neutron [req-6368c3f1-da79-4b84-ac51-b69a8bcf79a7 req-533044bd-aef9-4c10-a075-5e79351e3448 service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1693.208649] env[62525]: DEBUG nova.compute.utils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1693.210780] env[62525]: DEBUG nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1693.211789] env[62525]: DEBUG nova.network.neutron [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1693.228045] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781780, 'name': ReconfigVM_Task, 'duration_secs': 0.680533} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.228930] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Reconfigured VM instance instance-00000058 to attach disk [datastore1] c75091c3-45d2-4c71-b2ad-d38e8a449624/c75091c3-45d2-4c71-b2ad-d38e8a449624.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1693.229705] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f5aa2cd-212b-444d-8cb8-8ddca8c6e536 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.234854] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: ad6179ad-bafb-42e7-932c-2aa4a5972c44] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1693.238471] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.238719] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.238925] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.239136] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.239724] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.240913] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1693.240913] env[62525]: value = "task-1781783" [ 1693.240913] env[62525]: _type = "Task" [ 1693.240913] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.241375] env[62525]: INFO nova.compute.manager [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Terminating instance [ 1693.244064] env[62525]: DEBUG nova.compute.manager [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1693.244262] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1693.248483] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b7fc14-895d-4a48-8563-fbf23dc5f943 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.256806] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781783, 'name': Rename_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.258985] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1693.259532] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3723829-fe91-42a0-955e-fb94f136c716 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.263404] env[62525]: DEBUG nova.policy [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83bf79d024f345a9a8c02004f8cefbaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eab7fca262814290a975bf85badc9b71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1693.271483] env[62525]: DEBUG oslo_vmware.api [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1693.271483] env[62525]: value = "task-1781784" [ 1693.271483] env[62525]: _type = "Task" [ 1693.271483] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.287085] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500208} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.287345] env[62525]: DEBUG oslo_vmware.api [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781784, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.287613] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 4278fbb1-d2bd-4e92-aaca-260d40aa26b1/4278fbb1-d2bd-4e92-aaca-260d40aa26b1.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1693.287839] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1693.288122] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27119a18-3c16-41ec-8aca-fe342edbfb6b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.295339] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1693.295339] env[62525]: value = "task-1781785" [ 1693.295339] env[62525]: _type = "Task" [ 1693.295339] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.305706] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781785, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.395017] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781782, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.552375] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.552652] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.553461] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dc1320-ea2d-4ea0-9380-739a25fc6a5b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.573119] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ce56de-c1b6-4e99-9352-3295da5b6fb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.602176] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Reconfiguring VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1693.602503] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a68c787d-c250-4815-b1f7-61849a2793d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.621937] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1693.621937] env[62525]: value = "task-1781786" [ 1693.621937] env[62525]: _type = "Task" [ 1693.621937] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.625151] env[62525]: DEBUG nova.network.neutron [-] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.631959] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.706742] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a748130-e978-404b-a3ea-a88573421fee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.711780] env[62525]: DEBUG nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1693.722017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2705ed63-7b79-42ea-bb05-b8bc18cc646c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.743691] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 16667060-2172-4c1b-a3c8-340bb38846cf] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1693.762391] env[62525]: DEBUG nova.compute.manager [req-6368c3f1-da79-4b84-ac51-b69a8bcf79a7 req-533044bd-aef9-4c10-a075-5e79351e3448 service nova] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Detach interface failed, port_id=a9ec3613-8b89-413b-831b-896e679be20d, reason: Instance 73156235-1b13-4fda-8957-ed8cd88ceb43 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1693.773138] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781783, 'name': Rename_Task, 'duration_secs': 0.151707} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.775625] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1693.775955] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15c2dfee-69c8-4839-b461-9dd6a9ccdc82 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.784757] env[62525]: DEBUG oslo_vmware.api [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781784, 'name': PowerOffVM_Task, 'duration_secs': 0.182493} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.785933] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1693.786229] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1693.786508] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1693.786508] env[62525]: value = "task-1781787" [ 1693.786508] env[62525]: _type = "Task" [ 1693.786508] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.786725] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89311229-0319-49a3-ba10-8ac05b1850b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.795517] env[62525]: DEBUG nova.network.neutron [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Successfully created port: 3ddb902b-b001-40bc-b635-a885589b1573 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1693.806122] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781787, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.811829] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100645} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.812152] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1693.813130] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9349bdb1-4b4a-482e-86ce-32bfb77ff050 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.837623] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 4278fbb1-d2bd-4e92-aaca-260d40aa26b1/4278fbb1-d2bd-4e92-aaca-260d40aa26b1.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1693.837999] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2eb12be-9469-40ff-91d2-d3b925c582d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.861279] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1693.861279] env[62525]: value = "task-1781789" [ 1693.861279] env[62525]: _type = "Task" [ 1693.861279] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.872060] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781789, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.892834] env[62525]: DEBUG oslo_vmware.api [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781782, 'name': PowerOnVM_Task, 'duration_secs': 0.806131} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.896229] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1693.896470] env[62525]: INFO nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Took 11.13 seconds to spawn the instance on the hypervisor. 
[ 1693.896666] env[62525]: DEBUG nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1693.898045] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8917391-3107-4781-af89-e15ca416fda0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.945918] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1693.945918] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1693.945918] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Deleting the datastore file [datastore1] 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1693.945918] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b96d2c11-04de-4d3c-9ffd-96c01fa7c370 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.955446] env[62525]: DEBUG oslo_vmware.api [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for the task: (returnval){ [ 1693.955446] env[62525]: value = "task-1781790" [ 1693.955446] env[62525]: _type = "Task" [ 1693.955446] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.966017] env[62525]: DEBUG oslo_vmware.api [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781790, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.102660] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2133b80a-a5e3-43af-9f4a-533819e94fcf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.110509] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e968b0-881c-4f2a-9959-526904725878 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.142064] env[62525]: INFO nova.compute.manager [-] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Took 1.49 seconds to deallocate network for instance. 
[ 1694.147246] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834155c2-4bdc-4367-ad5b-661de0fae01f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.153150] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.153338] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquired lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.153936] env[62525]: DEBUG nova.network.neutron [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1694.161577] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.163319] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a6a1c8-cd50-4e73-a63b-13052b4f5563 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.179926] env[62525]: DEBUG nova.compute.provider_tree [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.266732] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 808491cc-b195-4e81-afa5-86bd6ed8cb25] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1694.298528] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781787, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.374503] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781789, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.419908] env[62525]: INFO nova.compute.manager [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Took 26.14 seconds to build instance. [ 1694.466343] env[62525]: DEBUG oslo_vmware.api [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781790, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.652308] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.656779] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.683210] env[62525]: DEBUG nova.scheduler.client.report [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1694.724735] env[62525]: DEBUG nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1694.760584] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1694.760840] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1694.761009] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1694.761190] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1694.761336] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1694.761480] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1694.761683] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1694.762349] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1694.762349] env[62525]: DEBUG 
nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1694.762349] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1694.762510] env[62525]: DEBUG nova.virt.hardware [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1694.763398] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125da9c7-42cb-4b68-8e28-33b4fa0c0637 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.770661] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 024c7393-de18-4c76-a27e-757710824494] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1694.773724] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0883a6-1138-4e0e-8b99-da08759cf2a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.802188] env[62525]: DEBUG oslo_vmware.api [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781787, 'name': PowerOnVM_Task, 'duration_secs': 0.878282} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.802532] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1694.802777] env[62525]: INFO nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Took 9.61 seconds to spawn the instance on the hypervisor. 
[ 1694.803008] env[62525]: DEBUG nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1694.803797] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3804ca-ae28-4c7e-b98a-84e0ce7339da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.874238] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781789, 'name': ReconfigVM_Task, 'duration_secs': 0.700466} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.874575] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 4278fbb1-d2bd-4e92-aaca-260d40aa26b1/4278fbb1-d2bd-4e92-aaca-260d40aa26b1.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1694.875263] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07088de0-3dfa-4cbb-a42f-bfaea6390b77 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.883260] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1694.883260] env[62525]: value = "task-1781791" [ 1694.883260] env[62525]: _type = "Task" [ 1694.883260] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.891564] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781791, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.922854] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f02ebf5-f5b7-4b66-b22d-ca759e5f5626 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.645s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.966834] env[62525]: DEBUG oslo_vmware.api [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Task: {'id': task-1781790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.529429} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.967160] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1694.967362] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1694.967530] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1694.967703] env[62525]: INFO nova.compute.manager [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1694.968133] env[62525]: DEBUG oslo.service.loopingcall [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1694.968265] env[62525]: DEBUG nova.compute.manager [-] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1694.968359] env[62525]: DEBUG nova.network.neutron [-] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1695.153314] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.188344] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.188882] env[62525]: DEBUG nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1695.191649] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 8.174s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.192194] env[62525]: DEBUG nova.objects.instance [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1695.282561] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 81fbb354-21f2-43f0-8aa3-e80e10235326] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1695.311763] env[62525]: DEBUG nova.network.neutron [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [{"id": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "address": "fa:16:3e:94:5d:a4", "network": {"id": "e51190f0-269a-4f3f-9785-0b6940acc0a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-108866140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5258cd7a314fc784be2d2e33e6eceb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc247c87-0d", "ovs_interfaceid": "dc247c87-0d2d-47bf-9d66-5e81d9237fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.323346] env[62525]: INFO nova.compute.manager [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Took 26.22 seconds to build instance. [ 1695.395165] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781791, 'name': Rename_Task, 'duration_secs': 0.2557} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.395540] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1695.395891] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56f2b8b8-d320-40ed-acb6-420a44dd87b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.404478] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1695.404478] env[62525]: value = "task-1781792" [ 1695.404478] env[62525]: _type = "Task" [ 1695.404478] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.413616] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781792, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.452184] env[62525]: DEBUG nova.compute.manager [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Received event network-changed-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1695.452184] env[62525]: DEBUG nova.compute.manager [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Refreshing instance network info cache due to event network-changed-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1695.452184] env[62525]: DEBUG oslo_concurrency.lockutils [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] Acquiring lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.452184] env[62525]: DEBUG oslo_concurrency.lockutils [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] Acquired lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.452184] env[62525]: DEBUG nova.network.neutron [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Refreshing network info cache for port 0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1695.656175] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.698357] env[62525]: DEBUG nova.compute.utils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1695.698357] env[62525]: DEBUG nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1695.706029] env[62525]: DEBUG nova.network.neutron [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1695.766140] env[62525]: DEBUG nova.policy [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '391b41cf09fd42879d3f5cd3153c2045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a46df22dac6f473b8395f9302c3a4a75', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1695.770575] env[62525]: INFO nova.compute.manager [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Rebuilding instance [ 1695.792320] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6be49426-ddda-461e-908f-593c0904b129] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1695.800336] env[62525]: DEBUG nova.compute.manager [req-a83daaea-47dd-4a53-93f2-68003137fd79 req-ad6745d0-8e8d-469f-8659-7fad4072b11c service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Received event network-vif-plugged-3ddb902b-b001-40bc-b635-a885589b1573 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1695.800336] env[62525]: DEBUG oslo_concurrency.lockutils [req-a83daaea-47dd-4a53-93f2-68003137fd79 req-ad6745d0-8e8d-469f-8659-7fad4072b11c service nova] Acquiring lock "3b1a825f-b6a5-4822-86a5-57972f34748c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.800336] env[62525]: DEBUG oslo_concurrency.lockutils [req-a83daaea-47dd-4a53-93f2-68003137fd79 req-ad6745d0-8e8d-469f-8659-7fad4072b11c service nova] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.800336] env[62525]: DEBUG oslo_concurrency.lockutils [req-a83daaea-47dd-4a53-93f2-68003137fd79 req-ad6745d0-8e8d-469f-8659-7fad4072b11c service nova] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.800336] env[62525]: DEBUG nova.compute.manager [req-a83daaea-47dd-4a53-93f2-68003137fd79 req-ad6745d0-8e8d-469f-8659-7fad4072b11c service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] No waiting events found 
dispatching network-vif-plugged-3ddb902b-b001-40bc-b635-a885589b1573 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1695.800336] env[62525]: WARNING nova.compute.manager [req-a83daaea-47dd-4a53-93f2-68003137fd79 req-ad6745d0-8e8d-469f-8659-7fad4072b11c service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Received unexpected event network-vif-plugged-3ddb902b-b001-40bc-b635-a885589b1573 for instance with vm_state building and task_state spawning. [ 1695.815461] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Releasing lock "refresh_cache-e8586018-100e-4729-97fc-98effa87cd9e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.816477] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfa8af3-41ad-4d7d-82bd-ebd003491481 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.823827] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Resuming the VM {{(pid=62525) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1695.827104] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62d169db-f3ef-4e02-8306-e3afc669a2cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.827104] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1265c80-e0dd-4bc7-8a13-42b760b99ab5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.732s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.834723] env[62525]: DEBUG oslo_vmware.api [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1695.834723] env[62525]: value = "task-1781793" [ 1695.834723] env[62525]: _type = "Task" [ 1695.834723] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.835116] env[62525]: DEBUG nova.compute.manager [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1695.835961] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bcd126-7cbe-4a82-a8e1-f9ad143896ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.855124] env[62525]: DEBUG oslo_vmware.api [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781793, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.916807] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781792, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.957574] env[62525]: DEBUG nova.network.neutron [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Successfully updated port: 3ddb902b-b001-40bc-b635-a885589b1573 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1696.117209] env[62525]: DEBUG nova.network.neutron [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Successfully created port: ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1696.140858] env[62525]: DEBUG nova.network.neutron [-] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.157270] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.211052] env[62525]: DEBUG nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1696.215875] env[62525]: DEBUG oslo_concurrency.lockutils [None req-744d6b10-d714-4a03-a30c-43670ef94be2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.024s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.217522] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 8.762s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.285902] env[62525]: DEBUG nova.network.neutron [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updated VIF entry in instance network info cache for port 0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1696.286328] env[62525]: DEBUG nova.network.neutron [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updating instance_info_cache with network_info: [{"id": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "address": "fa:16:3e:7b:40:66", "network": {"id": "b6eccb5f-b3e7-4c58-89f6-689dc024fb2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1033245131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77eae31161444518aadfe27dd51c2081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ad012c1-5e", "ovs_interfaceid": "0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.299960] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 1badb7d9-692a-445e-ad47-ebd6e19f8197] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1696.348724] env[62525]: DEBUG oslo_vmware.api [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781793, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.357741] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.358210] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edd8cfa4-8831-4b8f-8153-a2c28a1863e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.368108] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1696.368108] env[62525]: value = "task-1781794" [ 1696.368108] env[62525]: _type = "Task" [ 1696.368108] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.380149] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781794, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.419084] env[62525]: DEBUG oslo_vmware.api [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781792, 'name': PowerOnVM_Task, 'duration_secs': 0.526113} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.419390] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1696.419599] env[62525]: INFO nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Took 8.70 seconds to spawn the instance on the hypervisor. 
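Many entries in this trace report lock lifecycles in the form `Acquiring lock ... / acquired ... :: waited Ns / released ... :: held Ns` (the `refresh_cache-*` and `compute_resources` locks, for example). The sketch below shows, under simplified assumptions, how a decorator can produce that waited/held bookkeeping around a critical section; it uses plain `threading` and `print()` rather than the oslo.concurrency named/fair/external lock machinery, and the log wording is only approximated.

```python
# Simplified illustration of the "acquired ... waited Ns" / "released ... held Ns"
# bookkeeping seen in the lock log lines.  The real service uses
# oslo_concurrency.lockutils; this sketch substitutes plain threading locks.
import functools
import threading
import time

_locks = {}
_locks_guard = threading.Lock()


def synchronized(name):
    """Decorate a function so calls serialize on the named lock."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with _locks_guard:
                lock = _locks.setdefault(name, threading.Lock())
            start = time.monotonic()
            lock.acquire()
            waited = time.monotonic() - start
            print(f'Lock "{name}" acquired by "{func.__name__}" :: waited {waited:.3f}s')
            held_start = time.monotonic()
            try:
                return func(*args, **kwargs)
            finally:
                held = time.monotonic() - held_start
                lock.release()
                print(f'Lock "{name}" released by "{func.__name__}" :: held {held:.3f}s')
        return wrapper
    return decorator


@synchronized("refresh_cache-<instance-uuid>")
def refresh_network_cache():
    time.sleep(0.1)  # stand-in for the Neutron round trip seen in the trace
```

The per-instance `refresh_cache-<uuid>` naming mirrors what the following entries show: each network info cache refresh serializes on a lock scoped to that instance, so concurrent event handlers wait rather than race.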
[ 1696.419779] env[62525]: DEBUG nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1696.420781] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6229639-cc54-4ba6-a241-cc7a1e0395d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.458033] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-3b1a825f-b6a5-4822-86a5-57972f34748c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.458193] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-3b1a825f-b6a5-4822-86a5-57972f34748c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.458476] env[62525]: DEBUG nova.network.neutron [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1696.645701] env[62525]: INFO nova.compute.manager [-] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Took 1.68 seconds to deallocate network for instance. [ 1696.659280] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.725355] env[62525]: INFO nova.compute.claims [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1696.794065] env[62525]: DEBUG oslo_concurrency.lockutils [req-60f77fe9-ead4-4b4e-94c5-1d46860604aa req-978554b8-b576-4d91-b4bd-2bbe1161d346 service nova] Releasing lock "refresh_cache-c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.803781] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: a24485a2-ea0c-4e63-9e9b-53b3cf58d5ca] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1696.846221] env[62525]: DEBUG oslo_vmware.api [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781793, 'name': PowerOnVM_Task, 'duration_secs': 0.567335} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.846491] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Resumed the VM {{(pid=62525) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1696.846678] env[62525]: DEBUG nova.compute.manager [None req-f79c20ea-f80f-4197-9714-91ac9d1ca51a tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1696.847478] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f305a76-b9a5-44dc-b008-0953117e7e36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.877634] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781794, 'name': PowerOffVM_Task, 'duration_secs': 0.241274} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.877919] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.878690] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.878953] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4b7a53e-fdc1-40fe-91d9-3c5bb090c422 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.886397] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1696.886397] env[62525]: value = "task-1781795" [ 1696.886397] env[62525]: _type = "Task" [ 1696.886397] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.897974] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1696.898311] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1696.898507] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369708', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'name': 'volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54d1a1ed-0880-4cca-8759-585dc65bdb1a', 'attached_at': '', 'detached_at': '', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'serial': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1696.899496] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a985cfce-dbe6-43e5-b473-02b46f3ef953 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.922335] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a861ddcf-d6e0-41d2-a815-2e7c66d62b9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.930419] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97342443-0293-4320-b9b8-a3b6108fda48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.953713] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea74810-dbfb-4107-8915-f1afd592740f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.957028] env[62525]: INFO nova.compute.manager [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Took 22.51 seconds to build instance. [ 1696.975232] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] The volume has not been displaced from its original location: [datastore1] volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c/volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1696.980664] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1696.982438] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef297bfd-1c81-4030-8a62-e4869026a13a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.004839] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1697.004839] env[62525]: value = "task-1781796" [ 1697.004839] env[62525]: _type = "Task" [ 1697.004839] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.016290] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781796, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.049542] env[62525]: DEBUG nova.network.neutron [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1697.155158] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1697.162929] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.231617] env[62525]: DEBUG nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1697.234990] env[62525]: INFO nova.compute.resource_tracker [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating resource usage from migration 52a5775e-aa5d-4847-8a57-847f128e703a [ 1697.267689] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1697.267937] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1697.268641] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1697.268641] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1697.268641] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1697.268641] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1697.268815] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1697.268994] 
env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1697.269233] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1697.269316] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1697.271211] env[62525]: DEBUG nova.virt.hardware [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1697.271211] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a28690-eeb0-45ca-a70a-74d3e1535c70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.280591] env[62525]: DEBUG nova.network.neutron [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Updating instance_info_cache with network_info: [{"id": "3ddb902b-b001-40bc-b635-a885589b1573", "address": "fa:16:3e:5b:71:06", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ddb902b-b0", "ovs_interfaceid": "3ddb902b-b001-40bc-b635-a885589b1573", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.282821] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e52c861-cf29-49ad-b60c-dff3e22db50d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.307240] env[62525]: DEBUG nova.compute.manager [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: bfb20735-1de9-4741-9d6f-5cd2ffedbca6] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1697.462022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1c6c25d9-cad8-4ef5-8331-0decd0fa30d9 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.018s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.484752] env[62525]: DEBUG nova.compute.manager [req-a4cadd42-e1cf-468e-a1b7-8faf4a8e6e26 req-9b1ff858-386a-4f4b-bc4c-20e582802441 service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received event network-vif-deleted-5b882b83-1f22-4eb3-845f-766cac71d2de {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1697.484752] env[62525]: DEBUG nova.compute.manager [req-a4cadd42-e1cf-468e-a1b7-8faf4a8e6e26 req-9b1ff858-386a-4f4b-bc4c-20e582802441 service nova] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Received event network-vif-deleted-2ac247b9-f66a-46ac-9bea-2b9c1870ba66 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1697.503062] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a024cc69-d42a-4638-97ba-c9a1357fff8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.518276] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc04470-d45e-4f67-8596-b0a88046026c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Suspending the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1697.518572] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781796, 'name': ReconfigVM_Task, 'duration_secs': 0.160211} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.518854] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d68d5807-a84a-446b-a05d-4c291d887c01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.520493] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1697.525505] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5932f35b-4b44-4071-b74a-6a2d739392d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.546467] env[62525]: DEBUG oslo_vmware.api [None req-2bc04470-d45e-4f67-8596-b0a88046026c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1697.546467] env[62525]: value = "task-1781797" [ 1697.546467] env[62525]: _type = "Task" [ 1697.546467] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.547785] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1697.547785] env[62525]: value = "task-1781798" [ 1697.547785] env[62525]: _type = "Task" [ 1697.547785] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.563324] env[62525]: DEBUG oslo_vmware.api [None req-2bc04470-d45e-4f67-8596-b0a88046026c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781797, 'name': SuspendVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.566186] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781798, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.595618] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cc67ea-3676-4923-bc13-6c200bd72bbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.604510] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a01858-00ad-4583-ad9c-54ba90afed48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.642326] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ace22c6-e114-4609-9fbc-bc397b0b32fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.658574] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e303cf-b898-4f22-85bd-6cf2784b3510 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.670747] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.679309] env[62525]: DEBUG nova.compute.provider_tree [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1697.691623] env[62525]: DEBUG nova.network.neutron [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Successfully updated port: ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1697.788928] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-3b1a825f-b6a5-4822-86a5-57972f34748c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.789759] env[62525]: DEBUG nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Instance network_info: |[{"id": "3ddb902b-b001-40bc-b635-a885589b1573", "address": "fa:16:3e:5b:71:06", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ddb902b-b0", "ovs_interfaceid": "3ddb902b-b001-40bc-b635-a885589b1573", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1697.790238] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:71:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ddb902b-b001-40bc-b635-a885589b1573', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1697.806242] env[62525]: DEBUG oslo.service.loopingcall [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.806709] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1697.807150] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8843d6de-2dab-415d-9a02-3d17c53c08bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.836118] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 0a7ef997-bda5-452e-abe0-537146bf23f8] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1697.849033] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1697.849033] env[62525]: value = "task-1781799" [ 1697.849033] env[62525]: _type = "Task" [ 1697.849033] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.857471] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781799, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.952573] env[62525]: DEBUG nova.compute.manager [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Received event network-changed-3ddb902b-b001-40bc-b635-a885589b1573 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1697.952785] env[62525]: DEBUG nova.compute.manager [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Refreshing instance network info cache due to event network-changed-3ddb902b-b001-40bc-b635-a885589b1573. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1697.953056] env[62525]: DEBUG oslo_concurrency.lockutils [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] Acquiring lock "refresh_cache-3b1a825f-b6a5-4822-86a5-57972f34748c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.953215] env[62525]: DEBUG oslo_concurrency.lockutils [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] Acquired lock "refresh_cache-3b1a825f-b6a5-4822-86a5-57972f34748c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.953395] env[62525]: DEBUG nova.network.neutron [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Refreshing network info cache for port 3ddb902b-b001-40bc-b635-a885589b1573 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1698.067019] env[62525]: DEBUG oslo_vmware.api [None req-2bc04470-d45e-4f67-8596-b0a88046026c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781797, 'name': SuspendVM_Task} progress is 58%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.069967] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781798, 'name': ReconfigVM_Task, 'duration_secs': 0.194326} completed successfully. 
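The Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>" entries come from oslo_concurrency's lockutils, which serialises refreshes of a single instance's network-info cache. A minimal sketch of that locking pattern; the lock-name format mirrors the log, and refresh_fn stands in for the actual cache refresh:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_fn):
        # Emits the same Acquiring/Acquired/Releasing debug lines seen above.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return refresh_fn(instance_uuid)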
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.070401] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369708', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'name': 'volume-29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54d1a1ed-0880-4cca-8759-585dc65bdb1a', 'attached_at': '', 'detached_at': '', 'volume_id': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c', 'serial': '29e63c2d-fe5c-4937-8bbe-4a45dbe8493c'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1698.070788] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1698.072971] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15063f6-a76f-4368-a702-2a90863a814a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.083173] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1698.083480] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-145cc9a9-d461-4d45-99ea-4c2b241957ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.160054] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.177141] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1698.177479] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1698.177593] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Deleting the datastore file [datastore1] 54d1a1ed-0880-4cca-8759-585dc65bdb1a {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1698.177846] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6515b28d-2190-4b72-a254-e9316b066e5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.182781] env[62525]: DEBUG nova.scheduler.client.report [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1698.188024] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for the task: (returnval){ [ 1698.188024] env[62525]: value = "task-1781801" [ 1698.188024] env[62525]: _type = "Task" [ 1698.188024] env[62525]: } to complete. 
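The inventory dump above is what Placement uses to derive schedulable capacity: per resource class, capacity is (total - reserved) * allocation_ratio. A small sketch reproducing that arithmetic with the exact figures from the log entry:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, int(capacity))   # VCPU 192, MEMORY_MB 196078, DISK_GB 400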
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.196731] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.196866] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.197028] env[62525]: DEBUG nova.network.neutron [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1698.199458] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781801, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.342024] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 24d38b8e-c48b-4562-817e-7ae57658fb1b] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1698.362343] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781799, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.561192] env[62525]: DEBUG oslo_vmware.api [None req-2bc04470-d45e-4f67-8596-b0a88046026c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781797, 'name': SuspendVM_Task, 'duration_secs': 0.908707} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.561481] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc04470-d45e-4f67-8596-b0a88046026c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Suspended the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1698.561745] env[62525]: DEBUG nova.compute.manager [None req-2bc04470-d45e-4f67-8596-b0a88046026c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1698.562575] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0c3cef-201a-494d-a120-01dca5aded00 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.659820] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.676972] env[62525]: DEBUG nova.network.neutron [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Updated VIF entry in instance network info cache for port 3ddb902b-b001-40bc-b635-a885589b1573. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1698.677365] env[62525]: DEBUG nova.network.neutron [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Updating instance_info_cache with network_info: [{"id": "3ddb902b-b001-40bc-b635-a885589b1573", "address": "fa:16:3e:5b:71:06", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ddb902b-b0", "ovs_interfaceid": "3ddb902b-b001-40bc-b635-a885589b1573", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.692229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.475s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.692433] env[62525]: INFO nova.compute.manager [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Migrating [ 1698.692713] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.692870] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.701474] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.045s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.701725] env[62525]: DEBUG nova.objects.instance [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lazy-loading 'resources' on Instance uuid 73156235-1b13-4fda-8957-ed8cd88ceb43 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1698.706027] env[62525]: DEBUG oslo_vmware.api [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Task: {'id': task-1781801, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157576} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.706445] env[62525]: INFO nova.compute.rpcapi [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1698.708152] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.715197] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1698.715456] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1698.715678] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1698.756084] env[62525]: DEBUG nova.network.neutron [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1698.780787] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1698.781224] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1d920d5-715b-427f-8a5e-84c0c1fe307a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.791621] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b649f103-7d81-4e7b-ba7f-651f29477b1d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.831048] env[62525]: ERROR nova.compute.manager [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Failed to detach volume 29e63c2d-fe5c-4937-8bbe-4a45dbe8493c from /dev/sda: nova.exception.InstanceNotFound: Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a could not be found. 
[ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Traceback (most recent call last): [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self.driver.rebuild(**kwargs) [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] raise NotImplementedError() [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] NotImplementedError [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] During handling of the above exception, another exception occurred: [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Traceback (most recent call last): [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self.driver.detach_volume(context, old_connection_info, [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] return self._volumeops.detach_volume(connection_info, instance) [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self._detach_volume_vmdk(connection_info, instance) [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] stable_ref.fetch_moref(session) [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] 
nova.exception.InstanceNotFound: Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a could not be found. [ 1698.831048] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1698.849450] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: dfa4b57e-6219-42eb-b257-263124f9a980] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1698.862228] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781799, 'name': CreateVM_Task, 'duration_secs': 0.601235} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.863628] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1698.863628] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.863628] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.863891] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1698.864110] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14e4b730-596c-451c-b685-3e5f0d0b4a84 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.868486] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1698.868486] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52222ce6-da69-41f8-8867-162e46b19803" [ 1698.868486] env[62525]: _type = "Task" [ 1698.868486] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.880236] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52222ce6-da69-41f8-8867-162e46b19803, 'name': SearchDatastore_Task} progress is 0%. 
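The traceback above shows the detach path wrapping the driver call in oslo_utils' save_and_reraise_exception, which is why the original InstanceNotFound is preserved and re-raised up the rebuild path instead of being swallowed. A minimal sketch of that pattern; the function name and arguments are placeholders mirroring the frames in the traceback:

    from oslo_utils import excutils

    def detach_root_volume(driver, context, connection_info, instance, mountpoint):
        try:
            driver.detach_volume(context, connection_info, instance, mountpoint)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup or logging can run here; on exiting the context the
                # original exception (InstanceNotFound above) is re-raised.
                pass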
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.911719] env[62525]: DEBUG nova.network.neutron [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Updating instance_info_cache with network_info: [{"id": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "address": "fa:16:3e:a5:85:3f", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapece5e526-9d", "ovs_interfaceid": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.991307] env[62525]: DEBUG nova.compute.utils [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Build of instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a aborted: Failed to rebuild volume backed instance. 
{{(pid=62525) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1698.992670] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "c75091c3-45d2-4c71-b2ad-d38e8a449624" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.992900] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.993595] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "c75091c3-45d2-4c71-b2ad-d38e8a449624-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.993820] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.993991] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.995990] env[62525]: INFO nova.compute.manager [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Terminating instance [ 1698.998421] env[62525]: ERROR nova.compute.manager [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a aborted: Failed to rebuild volume backed instance. 
[ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Traceback (most recent call last): [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self.driver.rebuild(**kwargs) [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] raise NotImplementedError() [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] NotImplementedError [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] During handling of the above exception, another exception occurred: [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Traceback (most recent call last): [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self._detach_root_volume(context, instance, root_bdm) [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] with excutils.save_and_reraise_exception(): [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self.force_reraise() [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] raise self.value [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self.driver.detach_volume(context, old_connection_info, [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] return self._volumeops.detach_volume(connection_info, instance) [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self._detach_volume_vmdk(connection_info, instance) [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] stable_ref.fetch_moref(session) [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] nova.exception.InstanceNotFound: Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a could not be found. [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] During handling of the above exception, another exception occurred: [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Traceback (most recent call last): [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 10866, in _error_out_instance_on_exception [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] yield [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 1698.998421] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self._do_rebuild_instance_with_claim( [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self._do_rebuild_instance( [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] self._rebuild_default_impl(**kwargs) [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] 
self._rebuild_volume_backed_instance( [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] raise exception.BuildAbortException( [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] nova.exception.BuildAbortException: Build of instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a aborted: Failed to rebuild volume backed instance. [ 1698.999743] env[62525]: ERROR nova.compute.manager [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] [ 1699.001196] env[62525]: DEBUG nova.compute.manager [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1699.001484] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1699.002728] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a16a64-9de8-4f5e-a35e-142aa788ae96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.011133] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1699.011386] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83578a97-4a75-49ba-8d63-5d8992e55a95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.017478] env[62525]: DEBUG oslo_vmware.api [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1699.017478] env[62525]: value = "task-1781802" [ 1699.017478] env[62525]: _type = "Task" [ 1699.017478] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.027473] env[62525]: DEBUG oslo_vmware.api [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.160481] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 18%. 
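The second traceback shows the driver-independent rebuild path converting the detach failure into BuildAbortException, which is what pushes the instance's vm_state to ERROR in the accompanying entries. A hedged sketch of that control flow; BuildAbortException and its kwargs match nova.exception, while the surrounding function is illustrative only:

    from nova import exception

    def rebuild_volume_backed_instance(context, instance, detach_root_volume):
        try:
            detach_root_volume(context, instance)
        except Exception:
            # Renders as "Build of instance <uuid> aborted: <reason>", as logged above.
            raise exception.BuildAbortException(
                instance_uuid=instance.uuid,
                reason="Failed to rebuild volume backed instance.")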
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.183280] env[62525]: DEBUG oslo_concurrency.lockutils [req-a99594c8-42c5-4748-9698-5c43914826c9 req-ad99ec41-73a3-4048-b8bb-d176963d3573 service nova] Releasing lock "refresh_cache-3b1a825f-b6a5-4822-86a5-57972f34748c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.228672] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.228908] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.229098] env[62525]: DEBUG nova.network.neutron [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1699.358469] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: cafae62e-b001-4ee0-8e89-4da9c60cf488] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1699.382547] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52222ce6-da69-41f8-8867-162e46b19803, 'name': SearchDatastore_Task, 'duration_secs': 0.014354} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.382863] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.383239] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1699.383335] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.383479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.383662] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1699.388739] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dab36bfd-1696-4fe9-b5bd-fa8bd98d8195 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.397918] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1699.398144] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1699.401150] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b73d2448-5ab3-404f-9a8c-d37d8cdeac9d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.407254] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1699.407254] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5242a4ea-983b-5216-666b-98d49dc656b3" [ 1699.407254] env[62525]: _type = "Task" [ 1699.407254] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.415059] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.415376] env[62525]: DEBUG nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Instance network_info: |[{"id": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "address": "fa:16:3e:a5:85:3f", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapece5e526-9d", "ovs_interfaceid": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1699.415637] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5242a4ea-983b-5216-666b-98d49dc656b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.418236] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:85:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afd3feb3-ffcc-4499-a2c2-eb6a48aefde9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ece5e526-9d41-4006-8159-5c2401d7fbbf', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1699.426041] env[62525]: DEBUG oslo.service.loopingcall [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1699.427539] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1699.427815] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98a68b76-0d47-4ea8-80fd-55e882250b80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.451101] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1699.451101] env[62525]: value = "task-1781803" [ 1699.451101] env[62525]: _type = "Task" [ 1699.451101] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.460114] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781803, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.516182] env[62525]: DEBUG nova.compute.manager [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Received event network-vif-plugged-ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1699.516478] env[62525]: DEBUG oslo_concurrency.lockutils [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] Acquiring lock "8abf0305-2000-4ffe-aa88-e2b355383ea3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.517099] env[62525]: DEBUG oslo_concurrency.lockutils [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.517099] env[62525]: DEBUG oslo_concurrency.lockutils [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.517099] env[62525]: DEBUG nova.compute.manager [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] No waiting events found dispatching network-vif-plugged-ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1699.517269] env[62525]: WARNING nova.compute.manager [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Received unexpected event network-vif-plugged-ece5e526-9d41-4006-8159-5c2401d7fbbf for instance with vm_state building and task_state spawning. [ 1699.517439] env[62525]: DEBUG nova.compute.manager [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Received event network-changed-ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1699.517599] env[62525]: DEBUG nova.compute.manager [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Refreshing instance network info cache due to event network-changed-ece5e526-9d41-4006-8159-5c2401d7fbbf. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1699.517826] env[62525]: DEBUG oslo_concurrency.lockutils [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] Acquiring lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.517954] env[62525]: DEBUG oslo_concurrency.lockutils [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] Acquired lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.518148] env[62525]: DEBUG nova.network.neutron [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Refreshing network info cache for port ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1699.530940] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6182cf32-c5eb-4183-9cf4-c17013a429ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.537494] env[62525]: DEBUG oslo_vmware.api [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781802, 'name': PowerOffVM_Task, 'duration_secs': 0.510154} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.538159] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1699.538335] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1699.538609] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5c74d5eb-67c4-4aaa-85a9-ce413efacc7d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.543152] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea26a4a3-f183-44a1-a500-4681b42bb89c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.582148] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99470041-a49d-4831-bf5d-1899f4160663 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.588701] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04229bea-cb43-4d23-ae37-fe3b1d810450 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.604302] env[62525]: DEBUG nova.compute.provider_tree [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.636241] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1699.636483] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1699.636665] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Deleting the datastore file [datastore1] c75091c3-45d2-4c71-b2ad-d38e8a449624 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1699.638203] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e557063-6ebb-43c5-bc10-bdfb5bccf26b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.643554] env[62525]: DEBUG oslo_vmware.api [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1699.643554] env[62525]: value = "task-1781805" [ 1699.643554] env[62525]: _type = "Task" [ 1699.643554] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.652341] env[62525]: DEBUG oslo_vmware.api [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781805, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.660811] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.861636] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 1a0b2c60-42da-4677-9b9a-d5c8a6acd1f6] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1699.918945] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5242a4ea-983b-5216-666b-98d49dc656b3, 'name': SearchDatastore_Task, 'duration_secs': 0.019573} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.919896] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3dff602-e855-441d-98d8-cdd9f6297927 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.925871] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1699.925871] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e530ff-a89f-beb2-f9d3-3a301b2a2e21" [ 1699.925871] env[62525]: _type = "Task" [ 1699.925871] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.933717] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e530ff-a89f-beb2-f9d3-3a301b2a2e21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.961856] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781803, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.972635] env[62525]: DEBUG nova.network.neutron [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance_info_cache with network_info: [{"id": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "address": "fa:16:3e:5e:3a:18", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edc6d99-87", "ovs_interfaceid": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.080559] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.080815] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.081040] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.081232] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.081400] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.085772] env[62525]: INFO nova.compute.manager [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Terminating instance [ 1700.089922] env[62525]: DEBUG nova.compute.manager [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1700.090139] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.090947] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e739cd0b-35e1-4586-aeae-0b0b232a6c42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.098398] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.098630] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea8d89e8-22c3-4dcc-8daa-461f6cde7df1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.107919] env[62525]: DEBUG nova.scheduler.client.report [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.153259] env[62525]: DEBUG oslo_vmware.api [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133927} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.158536] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1700.158734] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1700.158915] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1700.159101] env[62525]: INFO nova.compute.manager [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1700.159338] env[62525]: DEBUG oslo.service.loopingcall [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1700.159809] env[62525]: DEBUG nova.compute.manager [-] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1700.159909] env[62525]: DEBUG nova.network.neutron [-] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1700.166570] env[62525]: DEBUG oslo_vmware.api [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781786, 'name': ReconfigVM_Task, 'duration_secs': 6.098463} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.166794] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.167041] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Reconfigured VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1700.292787] env[62525]: DEBUG nova.network.neutron [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Updated VIF entry in instance network info cache for port ece5e526-9d41-4006-8159-5c2401d7fbbf. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1700.293185] env[62525]: DEBUG nova.network.neutron [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Updating instance_info_cache with network_info: [{"id": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "address": "fa:16:3e:a5:85:3f", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapece5e526-9d", "ovs_interfaceid": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.306790] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1700.307261] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1700.307503] env[62525]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleting the datastore file [datastore1] 4278fbb1-d2bd-4e92-aaca-260d40aa26b1 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1700.310089] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fb58261-4b1c-4830-b7d0-f65784a2c8d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.318778] env[62525]: DEBUG oslo_vmware.api [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1700.318778] env[62525]: value = "task-1781807" [ 1700.318778] env[62525]: _type = "Task" [ 1700.318778] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.328309] env[62525]: DEBUG oslo_vmware.api [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.367356] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 70313696-a9cc-499c-b9e6-329a71c4b915] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1700.438425] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e530ff-a89f-beb2-f9d3-3a301b2a2e21, 'name': SearchDatastore_Task, 'duration_secs': 0.010541} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.438630] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.438888] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1700.439297] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c546780-0e6d-43c8-83bb-859092a25a66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.446729] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1700.446729] env[62525]: value = "task-1781808" [ 1700.446729] env[62525]: _type = "Task" [ 1700.446729] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.457801] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.463597] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781803, 'name': CreateVM_Task, 'duration_secs': 0.559487} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.463597] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1700.463789] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1700.463958] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.464298] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1700.464581] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95ac9c39-fa92-4727-9ff0-407e6e3bd644 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.469615] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1700.469615] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52430d92-9c5a-c428-31cf-efae40c33a6c" [ 1700.469615] env[62525]: _type = "Task" [ 1700.469615] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.476333] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.483714] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52430d92-9c5a-c428-31cf-efae40c33a6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.526087] env[62525]: DEBUG nova.compute.manager [req-8da73641-792c-41e1-b4fb-00da72dae1ec req-a7a2bd9c-58be-4c5e-b4d6-2d8bcff14816 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Received event network-vif-deleted-d94efd19-31bc-4597-9716-352a3f25ecbf {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1700.526293] env[62525]: INFO nova.compute.manager [req-8da73641-792c-41e1-b4fb-00da72dae1ec req-a7a2bd9c-58be-4c5e-b4d6-2d8bcff14816 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Neutron deleted interface d94efd19-31bc-4597-9716-352a3f25ecbf; detaching it from the instance and deleting it from the info cache [ 1700.526460] env[62525]: DEBUG nova.network.neutron [req-8da73641-792c-41e1-b4fb-00da72dae1ec req-a7a2bd9c-58be-4c5e-b4d6-2d8bcff14816 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.613743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.616340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.461s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.616553] env[62525]: DEBUG nova.objects.instance [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lazy-loading 'resources' on Instance uuid 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.642795] env[62525]: INFO nova.scheduler.client.report [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Deleted allocations for instance 73156235-1b13-4fda-8957-ed8cd88ceb43 [ 1700.796850] env[62525]: DEBUG oslo_concurrency.lockutils [req-318b517b-d538-4974-b1a2-5a43465fa5f5 req-aff29bfc-5e64-41a2-b0fc-8bbe0b892358 service nova] Releasing lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.830479] env[62525]: DEBUG oslo_vmware.api [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160888} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.830747] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1700.830934] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1700.831174] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1700.831362] env[62525]: INFO nova.compute.manager [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Took 0.74 seconds to destroy the instance on the hypervisor. [ 1700.831604] env[62525]: DEBUG oslo.service.loopingcall [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1700.831825] env[62525]: DEBUG nova.compute.manager [-] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1700.831924] env[62525]: DEBUG nova.network.neutron [-] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1700.870643] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: d8c7d102-46e6-40fe-a864-a72590af4982] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1700.960027] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781808, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.981519] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52430d92-9c5a-c428-31cf-efae40c33a6c, 'name': SearchDatastore_Task, 'duration_secs': 0.02198} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.981725] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.981967] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1700.982227] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1700.982387] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.982574] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1700.982854] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f30cd6f-aee0-4d0a-b343-5f563ce273c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.997370] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1700.997370] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1700.997370] env[62525]: DEBUG nova.network.neutron [-] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.998676] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-059e5770-7761-4c66-a8eb-31ce24b55eee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.008293] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1701.008293] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d79fe2-3883-f795-d437-69ed98aa5f5d" [ 1701.008293] env[62525]: _type = "Task" [ 1701.008293] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.017660] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d79fe2-3883-f795-d437-69ed98aa5f5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.021786] env[62525]: DEBUG oslo_concurrency.lockutils [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.032562] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a6657ce6-3c87-4007-a6ef-9e39cdab2751 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.042536] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd9a492-168f-424c-ba81-b83cf7130cd1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.079326] env[62525]: DEBUG nova.compute.manager [req-8da73641-792c-41e1-b4fb-00da72dae1ec req-a7a2bd9c-58be-4c5e-b4d6-2d8bcff14816 service nova] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Detach interface failed, port_id=d94efd19-31bc-4597-9716-352a3f25ecbf, reason: Instance c75091c3-45d2-4c71-b2ad-d38e8a449624 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1701.152542] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58f1ea77-cc69-46bf-8530-b31b28424784 tempest-DeleteServersAdminTestJSON-861260245 tempest-DeleteServersAdminTestJSON-861260245-project-member] Lock "73156235-1b13-4fda-8957-ed8cd88ceb43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.652s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.374228] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 7a92bac8-9cee-41ed-81e3-08b48432fe7c] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1701.381926] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9d4630-47c1-4954-9198-68396143efd2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.389644] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbaf1fd5-d79a-472f-9c13-2273a9dcf8ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.420718] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ee6a9f-56d3-47f0-9815-57277203e37a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.429017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9beb406-e0b1-46a8-bcb7-8ad7f0f5eab0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.442743] env[62525]: DEBUG nova.compute.provider_tree [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1701.457163] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607408} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.457761] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1701.457979] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1701.458246] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-537db821-9bc5-4dd4-af87-e594ed6efd2d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.464936] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1701.464936] env[62525]: value = "task-1781809" [ 1701.464936] env[62525]: _type = "Task" [ 1701.464936] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.473016] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.502242] env[62525]: INFO nova.compute.manager [-] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Took 1.34 seconds to deallocate network for instance. [ 1701.524246] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d79fe2-3883-f795-d437-69ed98aa5f5d, 'name': SearchDatastore_Task, 'duration_secs': 0.033457} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.525085] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-502fc5af-2efd-42f0-b8ea-2bb543701984 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.534070] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1701.534070] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f03d2b-70dd-cecd-806a-6f78e14db66d" [ 1701.534070] env[62525]: _type = "Task" [ 1701.534070] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.538132] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.538279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.538523] env[62525]: DEBUG nova.network.neutron [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1701.546377] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f03d2b-70dd-cecd-806a-6f78e14db66d, 'name': SearchDatastore_Task, 'duration_secs': 0.008747} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.546807] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.547969] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8abf0305-2000-4ffe-aa88-e2b355383ea3/8abf0305-2000-4ffe-aa88-e2b355383ea3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1701.548306] env[62525]: DEBUG nova.compute.manager [req-23417999-3418-41e9-a96d-17c48aef62d3 req-bec11163-e3cf-47a6-853a-266a3703826c service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Received event network-vif-deleted-713f3c8d-70c6-4226-9362-57f90126f716 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1701.548306] env[62525]: INFO nova.compute.manager [req-23417999-3418-41e9-a96d-17c48aef62d3 req-bec11163-e3cf-47a6-853a-266a3703826c service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Neutron deleted interface 713f3c8d-70c6-4226-9362-57f90126f716; detaching it from the instance and deleting it from the info cache [ 1701.548467] env[62525]: DEBUG nova.network.neutron [req-23417999-3418-41e9-a96d-17c48aef62d3 req-bec11163-e3cf-47a6-853a-266a3703826c service nova] 
[instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.549680] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a23cc183-d445-4ec8-a28e-82b8255dbb3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.563222] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1701.563222] env[62525]: value = "task-1781810" [ 1701.563222] env[62525]: _type = "Task" [ 1701.563222] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.576709] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781810, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.641580] env[62525]: DEBUG nova.network.neutron [-] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.879394] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 06716b84-3761-40b0-b76a-0c6ebf0d6aa7] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1701.978060] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066063} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.979758] env[62525]: ERROR nova.scheduler.client.report [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] [req-eda3946f-f4ce-4d3c-a727-ae8c8de62e92] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eda3946f-f4ce-4d3c-a727-ae8c8de62e92"}]} [ 1701.980087] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1701.984764] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb17f09-cff3-409c-a343-bcd66b0fdfdb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.017297] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1702.019282] env[62525]: DEBUG nova.scheduler.client.report [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1702.021657] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95ba6142-1fdb-448e-ab42-328a06120e59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.037715] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.038597] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78cc9ca-5090-4c79-8cdd-425a883f13e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.065518] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance '462bc19d-1eaa-4c57-8ebb-412a97614f03' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1702.073055] env[62525]: DEBUG nova.scheduler.client.report [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1702.073055] env[62525]: DEBUG nova.compute.provider_tree [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1702.075447] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6650b78f-fb99-4a7a-a0df-f1f363ea9ce8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.078080] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1702.078080] env[62525]: value = "task-1781811" [ 1702.078080] env[62525]: _type = "Task" [ 1702.078080] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.093888] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f799d5f6-5378-4252-b8aa-1f51edf6722d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.110397] env[62525]: DEBUG nova.scheduler.client.report [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1702.116205] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781810, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.116565] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781811, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.145216] env[62525]: INFO nova.compute.manager [-] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Took 1.31 seconds to deallocate network for instance. 
[ 1702.146157] env[62525]: DEBUG nova.compute.manager [req-23417999-3418-41e9-a96d-17c48aef62d3 req-bec11163-e3cf-47a6-853a-266a3703826c service nova] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Detach interface failed, port_id=713f3c8d-70c6-4226-9362-57f90126f716, reason: Instance 4278fbb1-d2bd-4e92-aaca-260d40aa26b1 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1702.147594] env[62525]: DEBUG nova.scheduler.client.report [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1702.354916] env[62525]: INFO nova.network.neutron [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Port 6c93e506-f746-4d2e-922a-f389df5494a8 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1702.355407] env[62525]: DEBUG nova.network.neutron [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.356681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Acquiring lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.356898] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 
tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.357199] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Acquiring lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.357400] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.358197] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.361629] env[62525]: INFO nova.compute.manager [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Terminating instance [ 1702.364966] env[62525]: DEBUG nova.compute.manager [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1702.365274] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2535069d-da69-4b8b-8440-dcea85939c8c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.372834] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.373158] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.373305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "e8586018-100e-4729-97fc-98effa87cd9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.373509] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.373691] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.377796] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb417a6d-ed66-472e-9168-8d34428d306a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.388875] env[62525]: INFO nova.compute.manager [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Terminating instance [ 1702.393012] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 5893bdc8-490a-4d5b-a0e1-7f7d19fb24f9] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1702.395346] env[62525]: DEBUG nova.compute.manager 
[None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1702.395567] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1702.396978] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30dd0b79-996c-4ca8-abc9-08adc8d09b97 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.404907] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.404907] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-637b5b43-d328-4565-b0af-3880b07ff8c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.422239] env[62525]: WARNING nova.virt.vmwareapi.driver [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a could not be found. [ 1702.422476] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1702.425599] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-610db69a-66d1-4cbf-bcdb-46c74293a545 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.430713] env[62525]: DEBUG oslo_vmware.api [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1702.430713] env[62525]: value = "task-1781812" [ 1702.430713] env[62525]: _type = "Task" [ 1702.430713] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.438375] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec4678c-51e0-458a-bd40-7fe05a5e5619 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.455328] env[62525]: DEBUG oslo_vmware.api [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.481968] env[62525]: WARNING nova.virt.vmwareapi.vmops [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a could not be found. [ 1702.482463] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1702.482463] env[62525]: INFO nova.compute.manager [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Took 0.12 seconds to destroy the instance on the hypervisor. [ 1702.482860] env[62525]: DEBUG oslo.service.loopingcall [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.483582] env[62525]: DEBUG nova.compute.manager [-] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1702.483687] env[62525]: DEBUG nova.network.neutron [-] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1702.523556] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bd15c2-3460-4e18-8c9f-e83b527e6373 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.532712] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442b0ca7-e245-4500-b8b1-92f37429725d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.575758] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d18fbd-e3a4-4f5b-85aa-9847e38751af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.583638] env[62525]: DEBUG nova.compute.manager [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1702.583767] env[62525]: DEBUG nova.compute.manager [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing instance network info cache due to event network-changed-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1702.583902] env[62525]: DEBUG oslo_concurrency.lockutils [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] Acquiring lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.586353] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.590813] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d16d107-dbe6-46ab-b0bf-6bee86a67cb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.599354] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3da3678-e319-4540-9618-04e697e67bc0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.613983] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781811, 'name': ReconfigVM_Task, 'duration_secs': 0.478106} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.613983] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1702.613983] env[62525]: value = "task-1781813" [ 1702.613983] env[62525]: _type = "Task" [ 1702.613983] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.613983] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781810, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696904} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.613983] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1702.613983] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 8abf0305-2000-4ffe-aa88-e2b355383ea3/8abf0305-2000-4ffe-aa88-e2b355383ea3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1702.613983] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1702.613983] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24b3e5d7-2aed-4d8a-a7ee-cc375b8bd38f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.618590] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4547986-1f2e-42c9-bc58-905f401586b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.630179] env[62525]: DEBUG nova.compute.provider_tree [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1702.636312] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.638890] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1702.638890] env[62525]: value = "task-1781815" [ 1702.638890] env[62525]: _type = "Task" [ 1702.638890] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.639155] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1702.639155] env[62525]: value = "task-1781814" [ 1702.639155] env[62525]: _type = "Task" [ 1702.639155] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.655233] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781814, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.655233] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781815, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.657479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.862855] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.865271] env[62525]: DEBUG oslo_concurrency.lockutils [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] Acquired lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.867798] env[62525]: DEBUG nova.network.neutron [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Refreshing network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1702.901224] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 61f05e69-5e90-47da-9f47-3651b580a23c] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1702.944152] env[62525]: DEBUG oslo_vmware.api [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781812, 'name': PowerOffVM_Task, 'duration_secs': 0.211521} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.944654] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1702.944991] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1702.945334] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54aaa1ee-89b1-4d03-9473-c83ca6edf93d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.041136] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1703.041372] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1703.041554] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleting the datastore file [datastore1] e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1703.041820] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1bc227a-e65a-40c3-aa3e-9197e5daec99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.048636] env[62525]: DEBUG oslo_vmware.api [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for the task: (returnval){ [ 1703.048636] env[62525]: value = "task-1781817" [ 1703.048636] env[62525]: _type = "Task" [ 1703.048636] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.059472] env[62525]: DEBUG oslo_vmware.api [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781817, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.125156] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781813, 'name': PowerOffVM_Task, 'duration_secs': 0.227845} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.125156] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1703.125301] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance '462bc19d-1eaa-4c57-8ebb-412a97614f03' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1703.153661] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781815, 'name': Rename_Task, 'duration_secs': 0.140879} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.156825] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1703.157167] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110226} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.157410] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba571bea-9589-49f1-bdcc-bf7b31e55763 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.159048] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1703.159787] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6727b151-b5d9-4daf-ad0c-730c7a8e9d00 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.183212] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 8abf0305-2000-4ffe-aa88-e2b355383ea3/8abf0305-2000-4ffe-aa88-e2b355383ea3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1703.185068] env[62525]: DEBUG nova.scheduler.client.report [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1703.185317] env[62525]: DEBUG nova.compute.provider_tree [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 110 to 111 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1703.185510] env[62525]: DEBUG nova.compute.provider_tree [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1703.190544] env[62525]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-093ff7bc-c962-4e49-8819-4717a956e135 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.211177] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1703.211177] env[62525]: value = "task-1781818" [ 1703.211177] env[62525]: _type = "Task" [ 1703.211177] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.211177] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.593s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.213524] env[62525]: DEBUG oslo_concurrency.lockutils [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.192s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.222613] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1703.222613] env[62525]: value = "task-1781819" [ 1703.222613] env[62525]: _type = "Task" [ 1703.222613] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.231431] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781818, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.240740] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781819, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.248626] env[62525]: INFO nova.scheduler.client.report [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Deleted allocations for instance 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6 [ 1703.369968] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9476bf68-1258-4b1b-ab28-c58eb5aa2b98 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-6c93e506-f746-4d2e-922a-f389df5494a8" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.320s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.405725] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 7c8474fd-2ca5-4ecc-b2e6-4248baafd639] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1703.529837] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f01426-f3e2-427e-b225-ff2e46959382 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.537145] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aeb8cb5-4097-4764-a20a-0d1b9e767a13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.579027] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa9eee2-c026-44ef-ae31-6264d3016d34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.582645] env[62525]: DEBUG nova.network.neutron [-] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.585089] env[62525]: DEBUG nova.compute.manager [req-61cea4ac-65a8-452c-871b-cb46ce44d964 req-2eed712b-405d-4708-839d-704169651c35 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Received event network-vif-deleted-cf50b3b9-451f-4000-8f23-dd36c9806f5f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1703.585089] env[62525]: INFO nova.compute.manager [req-61cea4ac-65a8-452c-871b-cb46ce44d964 req-2eed712b-405d-4708-839d-704169651c35 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Neutron deleted interface cf50b3b9-451f-4000-8f23-dd36c9806f5f; detaching it from the instance and deleting it from the info cache [ 1703.585289] env[62525]: DEBUG nova.network.neutron [req-61cea4ac-65a8-452c-871b-cb46ce44d964 req-2eed712b-405d-4708-839d-704169651c35 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.599306] env[62525]: DEBUG oslo_vmware.api [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Task: {'id': task-1781817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 
0.164532} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.599306] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1703.599306] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1703.599306] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1703.599531] env[62525]: INFO nova.compute.manager [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1703.600064] env[62525]: DEBUG oslo.service.loopingcall [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1703.600885] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0e49b6-20e4-4ffd-bcad-19377f07f506 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.605901] env[62525]: DEBUG nova.compute.manager [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1703.606088] env[62525]: DEBUG nova.network.neutron [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1703.619274] env[62525]: DEBUG nova.compute.provider_tree [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.632476] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1703.632750] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1703.632909] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1703.633165] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1703.633340] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1703.633494] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1703.633701] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1703.633862] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1703.634058] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1703.634276] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1703.635152] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1703.642657] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ea839c5-eef6-4cae-bb48-a0aa66a61ad6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.661284] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1703.661284] env[62525]: value = "task-1781820" [ 1703.661284] env[62525]: _type = "Task" [ 1703.661284] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.671567] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781820, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.693177] env[62525]: DEBUG nova.network.neutron [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updated VIF entry in instance network info cache for port c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.693554] env[62525]: DEBUG nova.network.neutron [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [{"id": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "address": "fa:16:3e:48:79:10", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d4ce3d-02", "ovs_interfaceid": "c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.724440] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781818, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.736910] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781819, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.762256] env[62525]: DEBUG oslo_concurrency.lockutils [None req-78d37e62-dfb8-4415-b5d7-ce1cda81317a tempest-ServersTestMultiNic-1593095087 tempest-ServersTestMultiNic-1593095087-project-member] Lock "5b1a2a46-df4d-41c6-a750-9ec3c75e57f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.523s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.909480] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 10f10329-9a7d-4e1b-8fb4-90350169e518] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1704.088343] env[62525]: INFO nova.compute.manager [-] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Took 1.60 seconds to deallocate network for instance. 
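Note: the recurring "Waiting for the task: ... to complete", "Task: {...} progress is N%", and "completed successfully" entries (task-1781810 through task-1781821) come from polling vCenter tasks. The snippet below is a generic illustrative polling loop, not oslo.vmware's wait_for_task; get_task_info is a hypothetical callable returning an object with .state, .progress, and .error.

import time

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300):
    """Poll a long-running task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task {task_ref} failed: {info.error}")
        # Corresponds to the periodic "Task: {...} progress is N%" log lines above.
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")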
[ 1704.088681] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7f3c949-7c4f-4e04-a007-c22ad79642bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.101341] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f32eb4-72c0-417e-9e89-c0364e5fb5c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.123615] env[62525]: DEBUG nova.scheduler.client.report [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1704.141331] env[62525]: DEBUG nova.compute.manager [req-61cea4ac-65a8-452c-871b-cb46ce44d964 req-2eed712b-405d-4708-839d-704169651c35 service nova] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Detach interface failed, port_id=cf50b3b9-451f-4000-8f23-dd36c9806f5f, reason: Instance 54d1a1ed-0880-4cca-8759-585dc65bdb1a could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1704.171948] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781820, 'name': ReconfigVM_Task, 'duration_secs': 0.207279} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.172299] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance '462bc19d-1eaa-4c57-8ebb-412a97614f03' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1704.196761] env[62525]: DEBUG oslo_concurrency.lockutils [req-79217ced-2c67-461f-98ef-d1d91e456f5e req-38678d51-fd3f-4ada-b8d5-92f69bf14aca service nova] Releasing lock "refresh_cache-c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.223760] env[62525]: DEBUG oslo_vmware.api [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781818, 'name': PowerOnVM_Task, 'duration_secs': 0.656719} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.223760] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1704.223760] env[62525]: INFO nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Took 9.50 seconds to spawn the instance on the hypervisor. [ 1704.223876] env[62525]: DEBUG nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1704.225065] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998c61fa-50b2-45fa-9ad6-e7979b60de7a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.237854] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781819, 'name': ReconfigVM_Task, 'duration_secs': 0.687712} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.240900] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 8abf0305-2000-4ffe-aa88-e2b355383ea3/8abf0305-2000-4ffe-aa88-e2b355383ea3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1704.242132] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35a585a3-67e4-4629-8851-d90e817f07b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.249353] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1704.249353] env[62525]: value = "task-1781821" [ 1704.249353] env[62525]: _type = "Task" [ 1704.249353] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.256939] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781821, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.413088] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 1fe967d9-351a-4b44-b7cb-d3c8395d9516] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1704.595370] env[62525]: DEBUG nova.compute.manager [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1704.595370] env[62525]: DEBUG nova.compute.manager [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing instance network info cache due to event network-changed-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1704.595370] env[62525]: DEBUG oslo_concurrency.lockutils [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.595370] env[62525]: DEBUG oslo_concurrency.lockutils [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.596075] env[62525]: DEBUG nova.network.neutron [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing network info cache for port 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1704.645026] env[62525]: DEBUG oslo_concurrency.lockutils [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.429s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.645026] env[62525]: INFO nova.compute.manager [None req-50535a00-f54a-4639-b3f3-e3e03fe61ddd tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Successfully reverted task state from rebuilding on failure for instance. 
[ 1704.650511] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.613s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.650964] env[62525]: DEBUG nova.objects.instance [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lazy-loading 'resources' on Instance uuid c75091c3-45d2-4c71-b2ad-d38e8a449624 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.652733] env[62525]: INFO nova.compute.manager [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Took 0.56 seconds to detach 1 volumes for instance. [ 1704.655262] env[62525]: DEBUG nova.compute.manager [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Deleting volume: 29e63c2d-fe5c-4937-8bbe-4a45dbe8493c {{(pid=62525) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1704.680023] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1704.680023] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1704.680023] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1704.680023] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1704.680023] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1704.680023] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1704.681033] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1704.681354] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1704.681805] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1704.682126] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1704.682442] env[62525]: DEBUG nova.virt.hardware [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1704.693023] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1704.693023] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48de15e0-c821-48b5-92ce-b26a4bdffdb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.722157] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1704.722157] env[62525]: value = "task-1781822" [ 1704.722157] env[62525]: _type = "Task" [ 1704.722157] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.737706] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781822, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.760826] env[62525]: INFO nova.compute.manager [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Took 19.94 seconds to build instance. [ 1704.766041] env[62525]: DEBUG nova.network.neutron [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.772547] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781821, 'name': Rename_Task, 'duration_secs': 0.147611} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.772547] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1704.772547] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a9fd7b9-4cb5-476f-9985-1b56d1009e23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.780336] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1704.780336] env[62525]: value = "task-1781824" [ 1704.780336] env[62525]: _type = "Task" [ 1704.780336] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.791331] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781824, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.916483] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f99e55bd-1e99-4e41-9dd7-fc12b75f4f6b] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1705.216969] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.233829] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781822, 'name': ReconfigVM_Task, 'duration_secs': 0.194242} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.234271] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1705.235482] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307327f2-dbad-43c6-b6c3-2f90d663437d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.260046] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 462bc19d-1eaa-4c57-8ebb-412a97614f03/462bc19d-1eaa-4c57-8ebb-412a97614f03.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1705.263582] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1084379f-4702-41df-b241-f986b3652a47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.281610] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bbeb2452-3d07-4374-8d94-b0283898590a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.481s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.281996] env[62525]: INFO nova.compute.manager [-] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Took 1.68 seconds to deallocate network for instance. [ 1705.297645] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1705.297645] env[62525]: value = "task-1781825" [ 1705.297645] env[62525]: _type = "Task" [ 1705.297645] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.300631] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781824, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.309452] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781825, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.422954] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 85e8da3c-d9d5-4569-b454-bcbb3d7a9bb5] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1705.519699] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1545a0fb-be3e-4f7f-8329-6f89713189fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.527691] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf89fa05-a53d-45a7-8544-157edf05f1fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.560357] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11828e56-bbf4-480b-9d2d-284232e0b2ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.569597] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded26072-62eb-4e5a-a115-88108477fd75 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.583164] env[62525]: DEBUG nova.compute.provider_tree [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1705.716310] env[62525]: DEBUG nova.network.neutron [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updated VIF entry in instance network info cache for port 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1705.716731] env[62525]: DEBUG nova.network.neutron [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.795742] env[62525]: DEBUG oslo_vmware.api [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781824, 'name': PowerOnVM_Task, 'duration_secs': 0.567585} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.796677] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.796952] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1705.797176] env[62525]: INFO nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Took 8.57 seconds to spawn the instance on the hypervisor. 
[ 1705.797376] env[62525]: DEBUG nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1705.798166] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e1b479-a694-436e-b0cd-67908d8be1fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.817112] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.928706] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 29aaac3b-1f0e-40fe-9805-a0e6e6ae597d] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1706.088021] env[62525]: DEBUG nova.scheduler.client.report [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1706.222088] env[62525]: DEBUG oslo_concurrency.lockutils [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] Releasing lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.224748] env[62525]: DEBUG nova.compute.manager [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Received event network-vif-deleted-dc247c87-0d2d-47bf-9d66-5e81d9237fa6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1706.224748] env[62525]: INFO nova.compute.manager [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Neutron deleted interface dc247c87-0d2d-47bf-9d66-5e81d9237fa6; detaching it from the instance and deleting it from the info cache [ 1706.224748] env[62525]: DEBUG nova.network.neutron [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.224748] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 
tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "3b1a825f-b6a5-4822-86a5-57972f34748c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.224748] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.225170] env[62525]: DEBUG nova.compute.manager [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1706.226195] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20047f97-544e-4648-97e9-378a5f0b46b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.234791] env[62525]: DEBUG nova.compute.manager [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1706.234791] env[62525]: DEBUG nova.objects.instance [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'flavor' on Instance uuid 3b1a825f-b6a5-4822-86a5-57972f34748c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1706.316569] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781825, 'name': ReconfigVM_Task, 'duration_secs': 0.768488} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.317146] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 462bc19d-1eaa-4c57-8ebb-412a97614f03/462bc19d-1eaa-4c57-8ebb-412a97614f03.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1706.317437] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance '462bc19d-1eaa-4c57-8ebb-412a97614f03' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1706.329366] env[62525]: INFO nova.compute.manager [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Took 20.21 seconds to build instance. [ 1706.430857] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: e8864d73-35e6-490b-a07c-e8cac8baf880] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1706.597021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.598336] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.941s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.598772] env[62525]: DEBUG nova.objects.instance [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'resources' on Instance uuid 4278fbb1-d2bd-4e92-aaca-260d40aa26b1 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1706.623356] env[62525]: INFO nova.scheduler.client.report [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Deleted allocations for instance c75091c3-45d2-4c71-b2ad-d38e8a449624 [ 1706.733021] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1e2eb22-ee4d-4049-81b9-d88e91efdcef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.743602] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6860ffd-575c-423c-8315-d6a55d382403 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.761753] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1706.764270] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2e576a3-96ea-40f5-ac7f-b7d9c17abaa5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.774026] env[62525]: DEBUG oslo_vmware.api [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1706.774026] env[62525]: value = "task-1781826" [ 1706.774026] env[62525]: _type = "Task" [ 1706.774026] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.794989] env[62525]: DEBUG nova.compute.manager [req-18f2a13d-4265-411c-a8cc-651519e5bc3e req-2dd19d57-758c-4ba8-911a-1b1f71706bb4 service nova] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Detach interface failed, port_id=dc247c87-0d2d-47bf-9d66-5e81d9237fa6, reason: Instance e8586018-100e-4729-97fc-98effa87cd9e could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1706.799710] env[62525]: DEBUG oslo_vmware.api [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781826, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.828587] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdef7c5-5bd7-4920-820f-40e6fc1c13a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.831558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-21d903fa-ae6d-477a-8dab-5995259b907d tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.732s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.851586] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900bcf28-1dfe-404d-983d-c74635959c5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.871230] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance '462bc19d-1eaa-4c57-8ebb-412a97614f03' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1706.934871] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 1f3792c0-9f86-4d76-a1a6-28d492869046] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1707.134280] env[62525]: DEBUG oslo_concurrency.lockutils [None req-04c33451-bad0-42fc-949a-d87fa0b82df5 tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "c75091c3-45d2-4c71-b2ad-d38e8a449624" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.141s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.286749] env[62525]: DEBUG oslo_vmware.api [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781826, 'name': PowerOffVM_Task, 'duration_secs': 0.34202} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.287559] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1707.287795] env[62525]: DEBUG nova.compute.manager [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1707.288640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4d3ae7-5b12-49e3-8bda-0866da75a1aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.431182] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a552dc6-679e-4064-bca4-ae375c3c0ab7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.433906] env[62525]: DEBUG nova.network.neutron [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Port 3edc6d99-8711-4b37-869a-4e1238dc7a5a binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1707.441205] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: d2e7c558-02af-477c-b996-239ef14ed75b] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1707.443945] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9704c3ec-447f-4220-9f93-05c68137a121 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.505360] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f13aade-9726-4181-8a1a-410319404551 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.520399] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b66c50-5bb1-458e-85a4-08fe65f712f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.549024] env[62525]: DEBUG nova.compute.provider_tree [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1707.806637] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3f434350-93cc-45eb-8965-b3194b4ae61d tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" "released" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.581s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.830563] env[62525]: DEBUG nova.compute.manager [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Received event network-changed-ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.830563] env[62525]: DEBUG nova.compute.manager [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Refreshing instance network info cache due to event network-changed-ece5e526-9d41-4006-8159-5c2401d7fbbf. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1707.830661] env[62525]: DEBUG oslo_concurrency.lockutils [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] Acquiring lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.830792] env[62525]: DEBUG oslo_concurrency.lockutils [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] Acquired lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.830955] env[62525]: DEBUG nova.network.neutron [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Refreshing network info cache for port ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1707.949494] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 3455a540-7fbc-46ba-b7d6-84a345c0463e] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1708.051593] env[62525]: DEBUG nova.scheduler.client.report [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1708.454670] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 9cfd3e8c-b6a5-4406-b165-1ce46ba3f0f1] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1708.463329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.467383] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.004s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.467664] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.561400] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.563409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.346s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.564128] env[62525]: DEBUG nova.objects.instance [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lazy-loading 'resources' on Instance uuid 54d1a1ed-0880-4cca-8759-585dc65bdb1a {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1708.599889] env[62525]: INFO nova.scheduler.client.report [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted allocations for instance 4278fbb1-d2bd-4e92-aaca-260d40aa26b1 [ 1708.641797] env[62525]: DEBUG nova.network.neutron [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Updated VIF entry in instance network info cache for port ece5e526-9d41-4006-8159-5c2401d7fbbf. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1708.642188] env[62525]: DEBUG nova.network.neutron [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Updating instance_info_cache with network_info: [{"id": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "address": "fa:16:3e:a5:85:3f", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapece5e526-9d", "ovs_interfaceid": "ece5e526-9d41-4006-8159-5c2401d7fbbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.963821] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: cb7f5d2b-18a2-4b35-9f81-8f8e7bb768a9] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1709.109942] env[62525]: DEBUG oslo_concurrency.lockutils [None req-be0031b5-9e3b-4b9d-a6f6-fcea1ad8485c tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "4278fbb1-d2bd-4e92-aaca-260d40aa26b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.029s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.150478] env[62525]: DEBUG oslo_concurrency.lockutils [req-7b0869d8-a86c-4c57-9fff-a99226e17ea7 req-a5ee54e9-44cc-4952-998d-57762e579e07 service nova] Releasing lock "refresh_cache-8abf0305-2000-4ffe-aa88-e2b355383ea3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.349779] env[62525]: INFO nova.compute.manager [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Rebuilding instance [ 1709.354381] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9e6abd-999d-4a20-901c-13c7f6a7acfb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.364099] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60940f4a-9703-4944-bca8-897fae4ffedc {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.406596] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428016cb-7297-4fd1-b26d-21f811de106f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.416582] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c6a1e2-79e5-4990-8084-285c3fd8a5f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.435786] env[62525]: DEBUG nova.compute.provider_tree [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1709.438087] env[62525]: DEBUG nova.compute.manager [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1709.439062] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6d5dbd-fe06-4a2e-8f5c-5d00b1eb3e77 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.467767] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f93669f2-c59d-4f3f-85a2-a60d714326ac] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1709.526943] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.527232] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.527468] env[62525]: DEBUG nova.network.neutron [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1709.939125] env[62525]: DEBUG nova.scheduler.client.report [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1709.951994] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1709.955262] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1842190b-799d-4dec-af5e-b7f46e603972 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.966528] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1709.966528] env[62525]: value = "task-1781828" [ 1709.966528] env[62525]: _type = "Task" [ 1709.966528] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.973028] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: aa639aa3-d21c-4923-bc39-56e648c566fb] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1709.982199] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1709.982460] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1709.983287] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd42484-b145-4740-80a0-faaa0488463c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.000920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "ad4e94cc-d59c-4876-bf66-ec084350f875" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.001282] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.002473] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1710.003113] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5190bf37-d108-4693-841a-0b900c0fcece {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.097827] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1710.098161] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1710.098286] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleting the datastore file [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1710.098706] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c1ed395-fa59-42fd-9a27-76c08088bcb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.105224] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1710.105224] env[62525]: value = "task-1781830" [ 1710.105224] env[62525]: _type = "Task" [ 1710.105224] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.114380] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781830, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.402254] env[62525]: DEBUG nova.network.neutron [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance_info_cache with network_info: [{"id": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "address": "fa:16:3e:5e:3a:18", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edc6d99-87", "ovs_interfaceid": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.448055] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.451109] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.653s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.451704] env[62525]: DEBUG nova.objects.instance [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lazy-loading 'resources' on Instance uuid e8586018-100e-4729-97fc-98effa87cd9e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1710.477662] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 56cb0d0c-a7dd-4158-8bed-ddff050e0226] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1710.501945] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "94560d78-071c-419d-ad10-f42a5b2271a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.502239] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.502480] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "94560d78-071c-419d-ad10-f42a5b2271a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.502675] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.502847] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.505579] env[62525]: INFO nova.compute.manager [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Terminating instance [ 1710.507242] env[62525]: DEBUG nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1710.511492] env[62525]: DEBUG nova.compute.manager [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1710.511690] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1710.513383] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba547b5-5717-4df7-8478-85ac2d8640f9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.521882] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1710.522140] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e27365df-c1b0-4084-b78f-e00661119f24 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.529046] env[62525]: DEBUG oslo_vmware.api [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1710.529046] env[62525]: value = "task-1781831" [ 1710.529046] env[62525]: _type = "Task" [ 1710.529046] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.538921] env[62525]: DEBUG oslo_vmware.api [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.615524] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781830, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158516} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.615786] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1710.616122] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1710.616248] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1710.834962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.835215] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.905158] env[62525]: DEBUG oslo_concurrency.lockutils [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.983487] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 98334a1b-1a73-408f-93a4-6dc72764ebfc] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1710.989032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-18fe5c9b-8545-4352-93cc-ac147e13c873 tempest-ServerActionsV293TestJSON-811188686 tempest-ServerActionsV293TestJSON-811188686-project-member] Lock "54d1a1ed-0880-4cca-8759-585dc65bdb1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.629s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.029840] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.042056] env[62525]: DEBUG oslo_vmware.api [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781831, 'name': PowerOffVM_Task, 'duration_secs': 0.209948} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.042332] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1711.042498] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1711.042882] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4638fc25-2cc3-4721-96db-a76f7dbdadef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.200537] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1711.200537] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1711.200537] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Deleting the datastore file [datastore1] 94560d78-071c-419d-ad10-f42a5b2271a8 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1711.200823] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-118b9c22-139f-4d90-97ee-84281cfce670 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.209311] env[62525]: DEBUG oslo_vmware.api [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for the task: (returnval){ [ 1711.209311] env[62525]: value = "task-1781833" [ 1711.209311] env[62525]: _type = "Task" [ 1711.209311] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.221537] env[62525]: DEBUG oslo_vmware.api [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781833, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.269842] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e33185-1155-498d-a96b-d012da39bc14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.277267] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b69208b-b1d7-4495-a840-ce8f473acff7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.310574] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0ee362-8e39-43b1-91be-0377cea98c43 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.318575] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3f6bb4-b343-4599-89d0-f3f4f9b82a5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.333442] env[62525]: DEBUG nova.compute.provider_tree [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1711.338046] env[62525]: DEBUG nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1711.432035] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de4afd4-bcb1-4c8f-9483-5f394486450e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.452694] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44176c74-9aa8-49a1-a175-e56daf2986aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.463060] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance '462bc19d-1eaa-4c57-8ebb-412a97614f03' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1711.488569] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: deef59c8-f710-434d-bddc-f63bb3d518b1] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1711.651342] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1711.651342] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1711.651342] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1711.651342] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1711.652867] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 
0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1711.653094] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1711.653351] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1711.653946] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1711.653946] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1711.653946] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1711.654141] env[62525]: DEBUG nova.virt.hardware [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1711.655320] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3285cef7-8af8-43a4-9a3b-ded71348873a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.665320] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73ad097-c0cc-485c-874c-744f875b08bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.680840] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:71:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ddb902b-b001-40bc-b635-a885589b1573', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1711.693018] env[62525]: DEBUG oslo.service.loopingcall [None 
req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.693333] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1711.695141] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6331942d-6607-4874-8509-7a2b60c99f14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.715177] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1711.715177] env[62525]: value = "task-1781834" [ 1711.715177] env[62525]: _type = "Task" [ 1711.715177] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.721654] env[62525]: DEBUG oslo_vmware.api [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Task: {'id': task-1781833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144453} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.722425] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1711.722425] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1711.722619] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1711.722772] env[62525]: INFO nova.compute.manager [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1711.723019] env[62525]: DEBUG oslo.service.loopingcall [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.723256] env[62525]: DEBUG nova.compute.manager [-] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1711.723353] env[62525]: DEBUG nova.network.neutron [-] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1711.728990] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781834, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.837363] env[62525]: DEBUG nova.scheduler.client.report [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1711.859297] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.974065] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1711.974397] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16b91a3a-0251-47a6-84d9-383e4976f88c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.984606] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1711.984606] env[62525]: value = "task-1781835" [ 1711.984606] env[62525]: _type = "Task" [ 1711.984606] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.994125] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c7603ce8-8471-4813-9faf-3667a205893c] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1711.997428] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781835, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.226585] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781834, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.264293] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.264468] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.292857] env[62525]: DEBUG nova.compute.manager [req-3263f225-375b-4684-aed4-2f79510bdc8f req-277a6688-56b3-41ba-a7cf-c057a8305142 service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Received event network-vif-deleted-1ac84b8a-0ab3-4332-910f-4710a6864b79 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1712.293036] env[62525]: INFO nova.compute.manager [req-3263f225-375b-4684-aed4-2f79510bdc8f req-277a6688-56b3-41ba-a7cf-c057a8305142 service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Neutron deleted interface 1ac84b8a-0ab3-4332-910f-4710a6864b79; detaching it from the instance and deleting it from the info cache [ 1712.293276] env[62525]: DEBUG nova.network.neutron [req-3263f225-375b-4684-aed4-2f79510bdc8f req-277a6688-56b3-41ba-a7cf-c057a8305142 service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.342198] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.344545] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.315s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.346041] env[62525]: INFO nova.compute.claims [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1712.372550] env[62525]: INFO nova.scheduler.client.report [None 
req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Deleted allocations for instance e8586018-100e-4729-97fc-98effa87cd9e [ 1712.496198] env[62525]: DEBUG oslo_vmware.api [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781835, 'name': PowerOnVM_Task, 'duration_secs': 0.462494} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.501023] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1712.501023] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-47763515-b65d-4912-94a8-0858542d6d3e tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance '462bc19d-1eaa-4c57-8ebb-412a97614f03' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1712.506605] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8adc8b4b-1087-4a11-9ee8-d897f1aa83f3] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1712.727558] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781834, 'name': CreateVM_Task, 'duration_secs': 0.550368} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.727762] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1712.728590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.728661] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.729087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1712.729390] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00af8fda-3d7c-4a10-875b-9a4e38673557 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.734998] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1712.734998] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cb3305-dff3-11fb-408b-73cb56f8d296" [ 1712.734998] env[62525]: _type = "Task" [ 1712.734998] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.743158] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cb3305-dff3-11fb-408b-73cb56f8d296, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.778888] env[62525]: DEBUG nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1712.778888] env[62525]: DEBUG nova.network.neutron [-] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.798801] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20396208-584b-4953-a21d-00890d76f531 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.818256] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15aa05bf-ab4a-42ff-a33c-66b9ba655346 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.868177] env[62525]: DEBUG nova.compute.manager [req-3263f225-375b-4684-aed4-2f79510bdc8f req-277a6688-56b3-41ba-a7cf-c057a8305142 service nova] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Detach interface failed, port_id=1ac84b8a-0ab3-4332-910f-4710a6864b79, reason: Instance 94560d78-071c-419d-ad10-f42a5b2271a8 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1712.878692] env[62525]: DEBUG oslo_concurrency.lockutils [None req-206b9af0-b4eb-4805-be4e-3e6f68e714ac tempest-ServersNegativeTestJSON-1936644249 tempest-ServersNegativeTestJSON-1936644249-project-member] Lock "e8586018-100e-4729-97fc-98effa87cd9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.505s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.015035] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.015237] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances with incomplete migration {{(pid=62525) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1713.137634] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdbbff5-d72c-4814-843e-ebeaf1c1c882 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.146420] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8aaf86-b654-4a89-b4d5-9bc47adae6ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.180260] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d48e42d-1173-4116-93f7-82e0df204ab1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.188275] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7711a078-ed40-4045-acad-be86dcd1f4e3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.201885] env[62525]: DEBUG nova.compute.provider_tree [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 
tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1713.246899] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cb3305-dff3-11fb-408b-73cb56f8d296, 'name': SearchDatastore_Task, 'duration_secs': 0.011559} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.247330] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.247574] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1713.247804] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.247956] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.248151] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1713.248413] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3243e0e9-e44c-4ec2-a4ba-cb2c9f6d1063 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.257214] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1713.257343] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad 
tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1713.258080] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d2ed6d4-bb00-4dd9-8b1a-32ed2c23ec66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.263644] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1713.263644] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520581a3-846a-dd85-62d9-9d032c8b99d3" [ 1713.263644] env[62525]: _type = "Task" [ 1713.263644] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.272706] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520581a3-846a-dd85-62d9-9d032c8b99d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.283650] env[62525]: INFO nova.compute.manager [-] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Took 1.56 seconds to deallocate network for instance. [ 1713.297274] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.522180] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.706687] env[62525]: DEBUG nova.scheduler.client.report [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1713.774311] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520581a3-846a-dd85-62d9-9d032c8b99d3, 'name': SearchDatastore_Task, 'duration_secs': 0.00918} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.775949] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06817712-d4c7-488e-8729-04a0f4e0ed11 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.781275] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1713.781275] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52043c97-5307-fbb9-9c28-ffcba9f6c98f" [ 1713.781275] env[62525]: _type = "Task" [ 1713.781275] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.792050] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52043c97-5307-fbb9-9c28-ffcba9f6c98f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.792527] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.211352] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.866s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.211983] env[62525]: DEBUG nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1714.215292] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.356s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.216751] env[62525]: INFO nova.compute.claims [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1714.295218] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52043c97-5307-fbb9-9c28-ffcba9f6c98f, 'name': SearchDatastore_Task, 'duration_secs': 0.01018} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.295555] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.295825] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1714.296361] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-808e5129-33fd-4456-9108-e33bd80a2070 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.303878] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1714.303878] env[62525]: value = "task-1781836" [ 1714.303878] env[62525]: _type = "Task" [ 1714.303878] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.313976] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781836, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.717092] env[62525]: DEBUG nova.compute.utils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1714.718812] env[62525]: DEBUG nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1714.719071] env[62525]: DEBUG nova.network.neutron [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1714.768558] env[62525]: DEBUG nova.policy [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e50433248fb4eb088e90d25fcb67c7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f3d5c15d37145aa84818a2ad88f307f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1714.814496] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507013} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.814777] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1714.814990] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1714.815266] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc4396c3-db48-4a6f-9e28-77207eccd257 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.822465] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1714.822465] env[62525]: value = "task-1781837" [ 1714.822465] env[62525]: _type = "Task" [ 1714.822465] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.831520] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781837, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.062174] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.062461] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.062643] env[62525]: DEBUG nova.compute.manager [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Going to confirm migration 1 {{(pid=62525) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1715.144317] env[62525]: DEBUG nova.network.neutron [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Successfully created port: 170fce2e-bab4-4fa8-b3b6-6067a35d8fbd {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1715.222010] env[62525]: DEBUG nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1715.342941] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079613} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.342941] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1715.343706] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0117c651-945f-42d7-8b4a-3ef2a59d5afa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.383811] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1715.387529] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1e59f92-c515-428f-b3ef-b2f5f5a31b9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.421199] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1715.421199] env[62525]: value = "task-1781838" [ 1715.421199] env[62525]: _type = "Task" [ 1715.421199] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.436147] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781838, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.556921] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db9c695-05a3-4943-8b1a-ec6edb3c7afc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.564584] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2dfb7d-a9d7-4db1-8e98-86083236a009 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.598250] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12f28cf-5c55-4e16-b35f-a3b985aef84d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.605879] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94a0996-c34d-4dfe-a076-435218dddc5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.619232] env[62525]: DEBUG nova.compute.provider_tree [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.649504] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.649681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.649854] env[62525]: DEBUG nova.network.neutron [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1715.650048] env[62525]: DEBUG nova.objects.instance [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lazy-loading 'info_cache' on Instance uuid 462bc19d-1eaa-4c57-8ebb-412a97614f03 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1715.934476] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781838, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.123280] env[62525]: DEBUG nova.scheduler.client.report [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1716.236368] env[62525]: DEBUG nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1716.261260] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1716.261502] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1716.261658] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1716.261834] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1716.261977] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1716.262136] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1716.262336] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1716.262492] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1716.262652] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1716.262822] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1716.263053] env[62525]: DEBUG nova.virt.hardware [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1716.263895] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99939294-38b6-4ebd-b12d-2a68ba2c71da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.272171] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f92f76d-2b6d-44fa-82d4-e1eb6ddb2de5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.432695] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781838, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.558858] env[62525]: DEBUG nova.compute.manager [req-4ed6c3af-8e35-4558-b012-30da71c84ffd req-e270b408-d8be-4dad-8d98-978ff87f35c2 service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Received event network-vif-plugged-170fce2e-bab4-4fa8-b3b6-6067a35d8fbd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1716.559035] env[62525]: DEBUG oslo_concurrency.lockutils [req-4ed6c3af-8e35-4558-b012-30da71c84ffd req-e270b408-d8be-4dad-8d98-978ff87f35c2 service nova] Acquiring lock "ad4e94cc-d59c-4876-bf66-ec084350f875-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.559360] env[62525]: DEBUG oslo_concurrency.lockutils [req-4ed6c3af-8e35-4558-b012-30da71c84ffd req-e270b408-d8be-4dad-8d98-978ff87f35c2 service nova] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.559584] env[62525]: DEBUG oslo_concurrency.lockutils [req-4ed6c3af-8e35-4558-b012-30da71c84ffd req-e270b408-d8be-4dad-8d98-978ff87f35c2 service nova] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.559867] env[62525]: DEBUG nova.compute.manager [req-4ed6c3af-8e35-4558-b012-30da71c84ffd req-e270b408-d8be-4dad-8d98-978ff87f35c2 service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] No waiting events found dispatching network-vif-plugged-170fce2e-bab4-4fa8-b3b6-6067a35d8fbd {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1716.559934] env[62525]: WARNING nova.compute.manager [req-4ed6c3af-8e35-4558-b012-30da71c84ffd req-e270b408-d8be-4dad-8d98-978ff87f35c2 service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Received unexpected event network-vif-plugged-170fce2e-bab4-4fa8-b3b6-6067a35d8fbd for instance with vm_state building and task_state spawning. [ 1716.628262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.628806] env[62525]: DEBUG nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1716.631324] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.334s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.633168] env[62525]: INFO nova.compute.claims [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1716.646732] env[62525]: DEBUG nova.network.neutron [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Successfully updated port: 170fce2e-bab4-4fa8-b3b6-6067a35d8fbd {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1716.849027] env[62525]: DEBUG nova.network.neutron [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance_info_cache with network_info: [{"id": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "address": "fa:16:3e:5e:3a:18", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3edc6d99-87", "ovs_interfaceid": "3edc6d99-8711-4b37-869a-4e1238dc7a5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.933881] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781838, 'name': ReconfigVM_Task, 'duration_secs': 1.289233} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.934238] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c/3b1a825f-b6a5-4822-86a5-57972f34748c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1716.934947] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5aeca94f-cd4f-43eb-9116-2d9642e4288a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.941175] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1716.941175] env[62525]: value = "task-1781839" [ 1716.941175] env[62525]: _type = "Task" [ 1716.941175] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.948693] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781839, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.136919] env[62525]: DEBUG nova.compute.utils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1717.140056] env[62525]: DEBUG nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1717.140234] env[62525]: DEBUG nova.network.neutron [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1717.148130] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-ad4e94cc-d59c-4876-bf66-ec084350f875" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.148130] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-ad4e94cc-d59c-4876-bf66-ec084350f875" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.148274] env[62525]: DEBUG nova.network.neutron [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1717.201127] env[62525]: DEBUG nova.policy [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98769d9ddf744118910ce61bcf47f145', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c87f1997d5c4739850790da5dd969fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1717.353159] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-462bc19d-1eaa-4c57-8ebb-412a97614f03" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.353159] env[62525]: DEBUG nova.objects.instance [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lazy-loading 'migration_context' on Instance uuid 462bc19d-1eaa-4c57-8ebb-412a97614f03 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1717.451365] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781839, 'name': Rename_Task, 'duration_secs': 0.139542} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.453980] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1717.453980] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-611844b7-389b-48c0-951b-ec91262246f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.461014] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1717.461014] env[62525]: value = "task-1781840" [ 1717.461014] env[62525]: _type = "Task" [ 1717.461014] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.471019] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781840, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.505528] env[62525]: DEBUG nova.network.neutron [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Successfully created port: 78deceee-4409-4d65-b4f0-dfc4e932c381 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1717.643661] env[62525]: DEBUG nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1717.694785] env[62525]: DEBUG nova.network.neutron [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1717.840121] env[62525]: DEBUG nova.network.neutron [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Updating instance_info_cache with network_info: [{"id": "170fce2e-bab4-4fa8-b3b6-6067a35d8fbd", "address": "fa:16:3e:85:73:8c", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap170fce2e-ba", "ovs_interfaceid": "170fce2e-bab4-4fa8-b3b6-6067a35d8fbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.858104] env[62525]: DEBUG nova.objects.base [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Object Instance<462bc19d-1eaa-4c57-8ebb-412a97614f03> lazy-loaded attributes: info_cache,migration_context {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1717.859934] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b559c62-4ca9-4e6e-8d15-32b11903a2f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.884052] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bb8d539-50a9-4a8e-838f-54c4b08971ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.890853] env[62525]: DEBUG oslo_vmware.api [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1717.890853] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526e67e8-e7bf-5116-a62a-11d14b686bd2" [ 1717.890853] env[62525]: _type = "Task" [ 1717.890853] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.899856] env[62525]: DEBUG oslo_vmware.api [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526e67e8-e7bf-5116-a62a-11d14b686bd2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.926598] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7b91ab-482d-4b51-9640-e158e19b9fd3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.934091] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783b49eb-044e-4060-b87e-d88f0dba6b91 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.969513] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e319d5e-f9bf-4912-b6e6-5bfc5e9f2e0f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.977160] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781840, 'name': PowerOnVM_Task, 'duration_secs': 0.469812} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.979169] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1717.979376] env[62525]: DEBUG nova.compute.manager [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1717.980136] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e755a7-9476-43e8-8209-c4af4c0b00e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.983283] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b39ad1-8e41-48c7-8bd7-3688cee6bb63 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.000560] env[62525]: DEBUG nova.compute.provider_tree [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1718.342762] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-ad4e94cc-d59c-4876-bf66-ec084350f875" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.343076] env[62525]: DEBUG nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 
ad4e94cc-d59c-4876-bf66-ec084350f875] Instance network_info: |[{"id": "170fce2e-bab4-4fa8-b3b6-6067a35d8fbd", "address": "fa:16:3e:85:73:8c", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap170fce2e-ba", "ovs_interfaceid": "170fce2e-bab4-4fa8-b3b6-6067a35d8fbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1718.343496] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:73:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '170fce2e-bab4-4fa8-b3b6-6067a35d8fbd', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1718.351726] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating folder: Project (1f3d5c15d37145aa84818a2ad88f307f). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1718.352019] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3462bf17-487a-42a5-96f0-1281dca82da5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.362598] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created folder: Project (1f3d5c15d37145aa84818a2ad88f307f) in parent group-v369553. [ 1718.362784] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating folder: Instances. Parent ref: group-v369810. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1718.363016] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2cdcbcf-6ed8-4562-8f85-0b5424033c88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.372111] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created folder: Instances in parent group-v369810. [ 1718.372338] env[62525]: DEBUG oslo.service.loopingcall [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1718.372520] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1718.372720] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d2670dc-df16-4871-bc44-572d7480ff7c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.392054] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1718.392054] env[62525]: value = "task-1781843" [ 1718.392054] env[62525]: _type = "Task" [ 1718.392054] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.402551] env[62525]: DEBUG oslo_vmware.api [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526e67e8-e7bf-5116-a62a-11d14b686bd2, 'name': SearchDatastore_Task, 'duration_secs': 0.008524} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.405473] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.405686] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781843, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.503953] env[62525]: INFO nova.compute.manager [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] bringing vm to original state: 'stopped' [ 1718.507133] env[62525]: DEBUG nova.scheduler.client.report [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1718.585447] env[62525]: DEBUG nova.compute.manager [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Received event network-changed-170fce2e-bab4-4fa8-b3b6-6067a35d8fbd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1718.585728] env[62525]: DEBUG nova.compute.manager [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Refreshing instance network info cache due to event network-changed-170fce2e-bab4-4fa8-b3b6-6067a35d8fbd. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1718.585928] env[62525]: DEBUG oslo_concurrency.lockutils [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] Acquiring lock "refresh_cache-ad4e94cc-d59c-4876-bf66-ec084350f875" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.586084] env[62525]: DEBUG oslo_concurrency.lockutils [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] Acquired lock "refresh_cache-ad4e94cc-d59c-4876-bf66-ec084350f875" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.586247] env[62525]: DEBUG nova.network.neutron [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Refreshing network info cache for port 170fce2e-bab4-4fa8-b3b6-6067a35d8fbd {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1718.657480] env[62525]: DEBUG nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1718.685452] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1718.685907] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1718.686242] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1718.686596] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1718.687285] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1718.687285] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1718.687388] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1718.687630] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1718.687894] env[62525]: DEBUG nova.virt.hardware [None 
req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1718.688171] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1718.688448] env[62525]: DEBUG nova.virt.hardware [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1718.689726] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1d09c7-be47-427e-a93f-8918530dc35d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.701342] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424e2365-a253-4ec1-b407-0e0cbff15368 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.906801] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781843, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.013341] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.014349] env[62525]: DEBUG nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1719.020088] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.225s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.020088] env[62525]: DEBUG nova.objects.instance [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lazy-loading 'resources' on Instance uuid 94560d78-071c-419d-ad10-f42a5b2271a8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1719.272686] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.273688] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.280885] env[62525]: DEBUG nova.network.neutron [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Successfully updated port: 78deceee-4409-4d65-b4f0-dfc4e932c381 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1719.359355] env[62525]: DEBUG nova.network.neutron [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Updated VIF entry in instance network info cache for port 170fce2e-bab4-4fa8-b3b6-6067a35d8fbd. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1719.359719] env[62525]: DEBUG nova.network.neutron [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Updating instance_info_cache with network_info: [{"id": "170fce2e-bab4-4fa8-b3b6-6067a35d8fbd", "address": "fa:16:3e:85:73:8c", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap170fce2e-ba", "ovs_interfaceid": "170fce2e-bab4-4fa8-b3b6-6067a35d8fbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.406526] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781843, 'name': CreateVM_Task, 'duration_secs': 0.515503} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.406709] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1719.407404] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.407573] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.407968] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1719.408170] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d935b283-aa7a-427d-8361-8e2bf0215f68 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.412669] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1719.412669] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52822504-c4aa-542c-b817-78c6ff9e9de6" [ 1719.412669] env[62525]: _type = "Task" [ 1719.412669] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.420286] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52822504-c4aa-542c-b817-78c6ff9e9de6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.519532] env[62525]: DEBUG nova.compute.utils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1719.521025] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "3b1a825f-b6a5-4822-86a5-57972f34748c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.521256] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.521564] env[62525]: DEBUG nova.compute.manager [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1719.524571] env[62525]: DEBUG nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1719.525040] env[62525]: DEBUG nova.network.neutron [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1719.527241] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc53c48-f1c9-4590-b7f8-0efce8f9870c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.537424] env[62525]: DEBUG nova.compute.manager [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1719.540951] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1719.541442] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7373f113-64fd-42df-bfa0-0b92e750314c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.548494] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1719.548494] env[62525]: value = "task-1781844" [ 1719.548494] env[62525]: _type = "Task" [ 1719.548494] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.559107] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781844, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.570579] env[62525]: DEBUG nova.policy [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59a07073c7534d17bfb2013552bbe0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c56f465d1a641a99458904c04137621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1719.769313] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a0be07-c9ba-48a9-8526-e05a629f0487 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.776815] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347236b9-8d4b-448e-8040-3a31022bcf3f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.781681] env[62525]: INFO nova.compute.manager [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Detaching volume f910a09f-577c-4fc0-bd96-40dafac718ab [ 1719.809985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.810103] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.810258] env[62525]: DEBUG nova.network.neutron [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1719.814940] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f617ec67-773c-4565-94eb-636d55016988 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.823259] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7669144-8808-4735-9a56-65eae24853cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.827923] env[62525]: DEBUG nova.network.neutron [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 
130a3015-6caf-4374-a35f-9dd49bb8b3bf] Successfully created port: e889a50a-6d0d-4673-bbd6-be0d2a72bd1c {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1719.840039] env[62525]: DEBUG nova.compute.provider_tree [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.844130] env[62525]: INFO nova.virt.block_device [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Attempting to driver detach volume f910a09f-577c-4fc0-bd96-40dafac718ab from mountpoint /dev/sdb [ 1719.844352] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1719.844539] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369792', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'name': 'volume-f910a09f-577c-4fc0-bd96-40dafac718ab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0067de08-6708-4c7c-a83a-ed9df193d5cd', 'attached_at': '', 'detached_at': '', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'serial': 'f910a09f-577c-4fc0-bd96-40dafac718ab'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1719.845298] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400bd20a-2e75-48d1-a84c-4c212535a888 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.867407] env[62525]: DEBUG oslo_concurrency.lockutils [req-49a0516f-5ae8-4c9c-ae6a-af540c3d8a44 req-e234f859-5273-4e74-82b4-b268c850c12f service nova] Releasing lock "refresh_cache-ad4e94cc-d59c-4876-bf66-ec084350f875" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.868231] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254eed94-f5f8-4adf-a63a-52b51570fd17 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.874750] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce47aa6a-dbb1-4e32-b2c9-b75a84d4ecf6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.895737] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36acd54b-1877-464e-83b1-8e461a74486c {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.913088] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] The volume has not been displaced from its original location: [datastore1] volume-f910a09f-577c-4fc0-bd96-40dafac718ab/volume-f910a09f-577c-4fc0-bd96-40dafac718ab.vmdk. No consolidation needed. {{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1719.919243] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfiguring VM instance instance-00000037 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1719.919626] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42d721ca-919f-4afc-9518-54ae79917e9a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.941158] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52822504-c4aa-542c-b817-78c6ff9e9de6, 'name': SearchDatastore_Task, 'duration_secs': 0.011044} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.942506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.942747] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1719.942973] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.943213] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.943420] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.943740] env[62525]: DEBUG oslo_vmware.api [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1719.943740] env[62525]: value = "task-1781845" [ 1719.943740] env[62525]: _type = "Task" [ 1719.943740] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.943951] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cbf4b3c-1111-4cd5-8da9-3ecb4cb590ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.953669] env[62525]: DEBUG oslo_vmware.api [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781845, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.956173] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.956340] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1719.957058] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-159ecdb7-60cb-4644-a871-e0414c3842e3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.962121] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1719.962121] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52664905-a1a6-c1af-eda1-68809b3f3c21" [ 1719.962121] env[62525]: _type = "Task" [ 1719.962121] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.970416] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52664905-a1a6-c1af-eda1-68809b3f3c21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.022798] env[62525]: DEBUG nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1720.058537] env[62525]: DEBUG oslo_vmware.api [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781844, 'name': PowerOffVM_Task, 'duration_secs': 0.202769} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.058916] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1720.059033] env[62525]: DEBUG nova.compute.manager [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1720.060260] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa8759f-6ed5-4780-8332-dc524e426a7a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.342837] env[62525]: DEBUG nova.scheduler.client.report [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1720.346822] env[62525]: DEBUG nova.network.neutron [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1720.455465] env[62525]: DEBUG oslo_vmware.api [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781845, 'name': ReconfigVM_Task, 'duration_secs': 0.255967} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.455753] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Reconfigured VM instance instance-00000037 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1720.460514] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-141f8f22-743f-4105-988a-80375f4ba3fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.487242] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52664905-a1a6-c1af-eda1-68809b3f3c21, 'name': SearchDatastore_Task, 'duration_secs': 0.009579} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.489434] env[62525]: DEBUG oslo_vmware.api [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1720.489434] env[62525]: value = "task-1781846" [ 1720.489434] env[62525]: _type = "Task" [ 1720.489434] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.489660] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a613b87c-3dab-401c-9830-375a13387c39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.501045] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1720.501045] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52415705-8551-e4c8-702e-f934a9a9bc70" [ 1720.501045] env[62525]: _type = "Task" [ 1720.501045] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.504341] env[62525]: DEBUG oslo_vmware.api [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781846, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.512754] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52415705-8551-e4c8-702e-f934a9a9bc70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.576723] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.054s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.645085] env[62525]: DEBUG nova.network.neutron [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance_info_cache with network_info: [{"id": "78deceee-4409-4d65-b4f0-dfc4e932c381", "address": "fa:16:3e:4f:e8:3d", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78deceee-44", "ovs_interfaceid": "78deceee-4409-4d65-b4f0-dfc4e932c381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.693260] env[62525]: DEBUG nova.compute.manager [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Received event network-vif-plugged-78deceee-4409-4d65-b4f0-dfc4e932c381 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1720.693260] env[62525]: DEBUG oslo_concurrency.lockutils [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] Acquiring lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.693260] env[62525]: DEBUG oslo_concurrency.lockutils [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.693260] env[62525]: DEBUG oslo_concurrency.lockutils [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.693260] env[62525]: DEBUG nova.compute.manager [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] No waiting events found dispatching network-vif-plugged-78deceee-4409-4d65-b4f0-dfc4e932c381 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1720.693260] env[62525]: WARNING nova.compute.manager [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Received unexpected event network-vif-plugged-78deceee-4409-4d65-b4f0-dfc4e932c381 for instance with vm_state building and task_state spawning. [ 1720.693498] env[62525]: DEBUG nova.compute.manager [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Received event network-changed-78deceee-4409-4d65-b4f0-dfc4e932c381 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1720.693531] env[62525]: DEBUG nova.compute.manager [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Refreshing instance network info cache due to event network-changed-78deceee-4409-4d65-b4f0-dfc4e932c381. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1720.693702] env[62525]: DEBUG oslo_concurrency.lockutils [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] Acquiring lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.849697] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.852680] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.447s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.875827] env[62525]: INFO nova.scheduler.client.report [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Deleted allocations for instance 94560d78-071c-419d-ad10-f42a5b2271a8 [ 1721.002042] env[62525]: DEBUG oslo_vmware.api [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781846, 'name': ReconfigVM_Task, 'duration_secs': 0.155238} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.002360] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369792', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'name': 'volume-f910a09f-577c-4fc0-bd96-40dafac718ab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0067de08-6708-4c7c-a83a-ed9df193d5cd', 'attached_at': '', 'detached_at': '', 'volume_id': 'f910a09f-577c-4fc0-bd96-40dafac718ab', 'serial': 'f910a09f-577c-4fc0-bd96-40dafac718ab'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1721.012879] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52415705-8551-e4c8-702e-f934a9a9bc70, 'name': SearchDatastore_Task, 'duration_secs': 0.029913} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.013151] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.013409] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1721.014027] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9682fd2-d463-46a1-86fe-dca824dc26da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.020143] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1721.020143] env[62525]: value = "task-1781847" [ 1721.020143] env[62525]: _type = "Task" [ 1721.020143] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.027837] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781847, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.035025] env[62525]: DEBUG nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1721.061212] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1721.061455] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1721.061652] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1721.061882] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1721.062109] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1721.062318] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1721.062567] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1721.062754] env[62525]: DEBUG nova.virt.hardware [None 
req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1721.062993] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1721.063194] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1721.063408] env[62525]: DEBUG nova.virt.hardware [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1721.064412] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff77de0-a72b-4926-b3c2-26efc9b382e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.073333] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6611262-5734-4d74-b1d3-8d587d461d6e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.090727] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.147864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.148257] env[62525]: DEBUG nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Instance network_info: |[{"id": "78deceee-4409-4d65-b4f0-dfc4e932c381", "address": "fa:16:3e:4f:e8:3d", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78deceee-44", "ovs_interfaceid": "78deceee-4409-4d65-b4f0-dfc4e932c381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1721.148611] env[62525]: DEBUG oslo_concurrency.lockutils [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] Acquired lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.149308] env[62525]: DEBUG nova.network.neutron [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Refreshing network info cache for port 78deceee-4409-4d65-b4f0-dfc4e932c381 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1721.150153] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:e8:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78deceee-4409-4d65-b4f0-dfc4e932c381', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1721.157953] env[62525]: DEBUG oslo.service.loopingcall [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1721.159173] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1721.159259] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a96e8c2c-3740-4b59-8d7f-71af669248fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.180612] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1721.180612] env[62525]: value = "task-1781848" [ 1721.180612] env[62525]: _type = "Task" [ 1721.180612] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.189086] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781848, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.258278] env[62525]: DEBUG nova.compute.manager [req-1910fcf3-f63a-4a01-8755-438fff130f87 req-8b64a87b-f3bf-48e7-8fcf-0ff60d4564b2 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Received event network-vif-plugged-e889a50a-6d0d-4673-bbd6-be0d2a72bd1c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1721.258545] env[62525]: DEBUG oslo_concurrency.lockutils [req-1910fcf3-f63a-4a01-8755-438fff130f87 req-8b64a87b-f3bf-48e7-8fcf-0ff60d4564b2 service nova] Acquiring lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.259251] env[62525]: DEBUG oslo_concurrency.lockutils [req-1910fcf3-f63a-4a01-8755-438fff130f87 req-8b64a87b-f3bf-48e7-8fcf-0ff60d4564b2 service nova] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.259455] env[62525]: DEBUG oslo_concurrency.lockutils [req-1910fcf3-f63a-4a01-8755-438fff130f87 req-8b64a87b-f3bf-48e7-8fcf-0ff60d4564b2 service nova] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.259632] env[62525]: DEBUG nova.compute.manager [req-1910fcf3-f63a-4a01-8755-438fff130f87 req-8b64a87b-f3bf-48e7-8fcf-0ff60d4564b2 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] No waiting events found dispatching network-vif-plugged-e889a50a-6d0d-4673-bbd6-be0d2a72bd1c {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1721.259800] env[62525]: WARNING nova.compute.manager [req-1910fcf3-f63a-4a01-8755-438fff130f87 req-8b64a87b-f3bf-48e7-8fcf-0ff60d4564b2 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Received unexpected event network-vif-plugged-e889a50a-6d0d-4673-bbd6-be0d2a72bd1c for instance with vm_state building and task_state spawning. [ 1721.365657] env[62525]: DEBUG nova.network.neutron [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Successfully updated port: e889a50a-6d0d-4673-bbd6-be0d2a72bd1c {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1721.383754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-7f8e7563-d790-4773-ba87-1728ee49543f tempest-VolumesAdminNegativeTest-1783722269 tempest-VolumesAdminNegativeTest-1783722269-project-member] Lock "94560d78-071c-419d-ad10-f42a5b2271a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.881s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.532653] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781847, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.542735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "3b1a825f-b6a5-4822-86a5-57972f34748c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.542735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.543088] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "3b1a825f-b6a5-4822-86a5-57972f34748c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.543340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.543639] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.546140] env[62525]: INFO nova.compute.manager [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Terminating instance [ 1721.552409] env[62525]: DEBUG nova.objects.instance [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lazy-loading 'flavor' on Instance uuid 0067de08-6708-4c7c-a83a-ed9df193d5cd {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.554353] env[62525]: DEBUG nova.compute.manager [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1721.554828] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1721.556326] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340bfa09-7650-4f86-af9e-434f2e783a36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.567805] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1721.570894] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab05f9e1-89f1-4950-82a4-bb70043685ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.630539] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3e283c-d89d-4995-a705-68acdf1dde72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.637952] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d516e72-0550-4c43-bcf7-851fef22d08d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.671868] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012cc4bd-6753-4b9c-ae5f-9892d5526b95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.680723] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3462875f-47b0-426e-9ff9-7a79160a6456 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.700535] env[62525]: DEBUG nova.compute.provider_tree [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.704977] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781848, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.870500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "refresh_cache-130a3015-6caf-4374-a35f-9dd49bb8b3bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.870500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "refresh_cache-130a3015-6caf-4374-a35f-9dd49bb8b3bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.870500] env[62525]: DEBUG nova.network.neutron [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1721.993421] env[62525]: DEBUG nova.network.neutron [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updated VIF entry in instance network info cache for port 78deceee-4409-4d65-b4f0-dfc4e932c381. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1721.993764] env[62525]: DEBUG nova.network.neutron [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance_info_cache with network_info: [{"id": "78deceee-4409-4d65-b4f0-dfc4e932c381", "address": "fa:16:3e:4f:e8:3d", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78deceee-44", "ovs_interfaceid": "78deceee-4409-4d65-b4f0-dfc4e932c381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.031115] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781847, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671496} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.031414] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1722.031536] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1722.031794] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84d46a4b-16f3-4eaa-bc65-84d41e5de80a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.038616] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1722.038616] env[62525]: value = "task-1781850" [ 1722.038616] env[62525]: _type = "Task" [ 1722.038616] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.046304] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.201339] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781848, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.206508] env[62525]: DEBUG nova.scheduler.client.report [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.418157] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1722.418398] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1722.418576] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleting the datastore file [datastore1] 3b1a825f-b6a5-4822-86a5-57972f34748c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1722.418861] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d3ebbfb-281a-4700-a5c0-426268a17ab6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.425915] env[62525]: DEBUG oslo_vmware.api [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1722.425915] env[62525]: value = "task-1781851" [ 1722.425915] env[62525]: _type = "Task" [ 1722.425915] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.426709] env[62525]: DEBUG nova.network.neutron [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1722.437175] env[62525]: DEBUG oslo_vmware.api [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781851, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.496985] env[62525]: DEBUG oslo_concurrency.lockutils [req-872f4b67-1a34-43b2-897c-f617bb1eb0eb req-723bc78e-db85-4cb0-962c-8367556dff4c service nova] Releasing lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.548822] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083532} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.549170] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1722.550099] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf64676-0ccd-4a79-aa06-c4b1f8d40a5a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.564835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-b434cfee-4dc8-4a60-9cd1-6fc503ab683e tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.291s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.574672] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1722.575128] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa2414fe-8927-4d8c-9914-0748c1b43d3b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.600097] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1722.600097] env[62525]: value = "task-1781852" [ 1722.600097] env[62525]: _type = "Task" [ 1722.600097] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.605713] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781852, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.651582] env[62525]: DEBUG nova.network.neutron [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Updating instance_info_cache with network_info: [{"id": "e889a50a-6d0d-4673-bbd6-be0d2a72bd1c", "address": "fa:16:3e:b0:9f:39", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape889a50a-6d", "ovs_interfaceid": "e889a50a-6d0d-4673-bbd6-be0d2a72bd1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.701099] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781848, 'name': CreateVM_Task, 'duration_secs': 1.327121} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.701099] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1722.701099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.701099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.701099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1722.701099] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-579e1df6-a30d-42bc-b593-02bba74199ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.708213] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1722.708213] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5208e21a-2970-0041-8c68-d6740d813798" [ 1722.708213] env[62525]: _type = "Task" [ 1722.708213] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.718736] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5208e21a-2970-0041-8c68-d6740d813798, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.941014] env[62525]: DEBUG oslo_vmware.api [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159853} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.942173] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1722.942371] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1722.942662] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1722.943043] env[62525]: INFO nova.compute.manager [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Took 1.39 seconds to destroy the instance on the hypervisor. [ 1722.943043] env[62525]: DEBUG oslo.service.loopingcall [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.943219] env[62525]: DEBUG nova.compute.manager [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1722.943315] env[62525]: DEBUG nova.network.neutron [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1723.107602] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781852, 'name': ReconfigVM_Task, 'duration_secs': 0.28724} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.107988] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Reconfigured VM instance instance-0000005c to attach disk [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1723.108618] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-484abbdf-c63e-4fcd-b681-c8a404132e6e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.115631] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1723.115631] env[62525]: value = "task-1781853" [ 1723.115631] env[62525]: _type = "Task" [ 1723.115631] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.123869] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781853, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.154619] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "refresh_cache-130a3015-6caf-4374-a35f-9dd49bb8b3bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.155054] env[62525]: DEBUG nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Instance network_info: |[{"id": "e889a50a-6d0d-4673-bbd6-be0d2a72bd1c", "address": "fa:16:3e:b0:9f:39", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape889a50a-6d", "ovs_interfaceid": "e889a50a-6d0d-4673-bbd6-be0d2a72bd1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1723.155436] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:9f:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e889a50a-6d0d-4673-bbd6-be0d2a72bd1c', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1723.163133] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating folder: Project (3c56f465d1a641a99458904c04137621). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1723.163422] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f67a153-f1b6-4bcc-9ea1-476b15ab616d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.174783] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created folder: Project (3c56f465d1a641a99458904c04137621) in parent group-v369553. [ 1723.174991] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating folder: Instances. Parent ref: group-v369814. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1723.175250] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1e7a3d0-bf75-4ea4-8183-ae56e32b655c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.184788] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created folder: Instances in parent group-v369814. [ 1723.185033] env[62525]: DEBUG oslo.service.loopingcall [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.185230] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1723.185441] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afa06dac-4403-4d1f-926c-ba6f11bc2148 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.204916] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1723.204916] env[62525]: value = "task-1781856" [ 1723.204916] env[62525]: _type = "Task" [ 1723.204916] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.217177] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.365s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.219910] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781856, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.223619] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.133s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.223829] env[62525]: DEBUG nova.objects.instance [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1723.226277] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5208e21a-2970-0041-8c68-d6740d813798, 'name': SearchDatastore_Task, 'duration_secs': 0.011394} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.226806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.227047] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1723.227430] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.227430] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.228664] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1723.228664] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c232b00-7fd6-480f-b0f6-74d130e25eb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.237410] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1723.237619] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1723.238743] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a961c52-0f57-43dc-a067-da676d50c0e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.244652] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1723.244652] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52308d89-9578-0843-9d86-116b8e653109" [ 1723.244652] env[62525]: _type = "Task" [ 1723.244652] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.253832] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52308d89-9578-0843-9d86-116b8e653109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.297929] env[62525]: DEBUG nova.compute.manager [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Received event network-changed-e889a50a-6d0d-4673-bbd6-be0d2a72bd1c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1723.298118] env[62525]: DEBUG nova.compute.manager [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Refreshing instance network info cache due to event network-changed-e889a50a-6d0d-4673-bbd6-be0d2a72bd1c. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1723.298327] env[62525]: DEBUG oslo_concurrency.lockutils [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] Acquiring lock "refresh_cache-130a3015-6caf-4374-a35f-9dd49bb8b3bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.298462] env[62525]: DEBUG oslo_concurrency.lockutils [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] Acquired lock "refresh_cache-130a3015-6caf-4374-a35f-9dd49bb8b3bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.298626] env[62525]: DEBUG nova.network.neutron [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Refreshing network info cache for port e889a50a-6d0d-4673-bbd6-be0d2a72bd1c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1723.625709] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781853, 'name': Rename_Task, 'duration_secs': 0.151099} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.625989] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1723.626269] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14f9a4cb-809f-48ff-86bd-b7fa190c9d5a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.633382] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1723.633382] env[62525]: value = "task-1781857" [ 1723.633382] env[62525]: _type = "Task" [ 1723.633382] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.640755] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781857, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.715561] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781856, 'name': CreateVM_Task, 'duration_secs': 0.503782} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.715742] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1723.716441] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.716608] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.717010] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1723.717269] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cc6c462-a5e0-4f0c-87dc-d93ff5b75e38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.721729] env[62525]: DEBUG oslo_vmware.api 
[None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1723.721729] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e7206a-7edd-c84d-386b-6ed0a2df3413" [ 1723.721729] env[62525]: _type = "Task" [ 1723.721729] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.734423] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e7206a-7edd-c84d-386b-6ed0a2df3413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.754019] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52308d89-9578-0843-9d86-116b8e653109, 'name': SearchDatastore_Task, 'duration_secs': 0.009558} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.757574] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ad03e58-f2c6-4e20-b91b-2946f9ad0e8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.762950] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1723.762950] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52babad6-4033-5dab-a353-61ea63c3e463" [ 1723.762950] env[62525]: _type = "Task" [ 1723.762950] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.770701] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.770968] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.771188] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "0067de08-6708-4c7c-a83a-ed9df193d5cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.771375] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.771557] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.773076] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52babad6-4033-5dab-a353-61ea63c3e463, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.774553] env[62525]: INFO nova.compute.manager [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Terminating instance [ 1723.776311] env[62525]: DEBUG nova.compute.manager [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1723.776498] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1723.777522] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16472c48-d016-4eea-b944-91be5c71c2b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.784177] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1723.784425] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0498d058-9e04-4c98-81a8-02ab0951c042 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.790878] env[62525]: DEBUG oslo_vmware.api [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1723.790878] env[62525]: value = "task-1781858" [ 1723.790878] env[62525]: _type = "Task" [ 1723.790878] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.794748] env[62525]: INFO nova.scheduler.client.report [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Deleted allocation for migration 52a5775e-aa5d-4847-8a57-847f128e703a [ 1723.800903] env[62525]: DEBUG oslo_vmware.api [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.825226] env[62525]: DEBUG nova.network.neutron [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.966150] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "2f713b35-9d07-4d25-a333-506fd2469bd5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.966423] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.966743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "2f713b35-9d07-4d25-a333-506fd2469bd5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.966990] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.967226] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.969331] env[62525]: INFO nova.compute.manager [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Terminating instance [ 1723.971955] env[62525]: DEBUG nova.compute.manager [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1723.972118] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1723.973290] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc16852-8a91-46a7-a9e1-0d78fe088180 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.981993] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1723.982245] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-373ed35f-ed4e-4fd4-8bc3-f664df2e74a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.988846] env[62525]: DEBUG oslo_vmware.api [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1723.988846] env[62525]: value = "task-1781859" [ 1723.988846] env[62525]: _type = "Task" [ 1723.988846] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.001357] env[62525]: DEBUG oslo_vmware.api [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781859, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.144448] env[62525]: DEBUG oslo_vmware.api [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781857, 'name': PowerOnVM_Task, 'duration_secs': 0.486712} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.144805] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1724.145025] env[62525]: INFO nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Took 7.91 seconds to spawn the instance on the hypervisor. 
[ 1724.145274] env[62525]: DEBUG nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1724.146019] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706c0665-e89b-4497-917c-63dc47e1268f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.163051] env[62525]: DEBUG nova.network.neutron [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Updated VIF entry in instance network info cache for port e889a50a-6d0d-4673-bbd6-be0d2a72bd1c. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1724.163426] env[62525]: DEBUG nova.network.neutron [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Updating instance_info_cache with network_info: [{"id": "e889a50a-6d0d-4673-bbd6-be0d2a72bd1c", "address": "fa:16:3e:b0:9f:39", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape889a50a-6d", "ovs_interfaceid": "e889a50a-6d0d-4673-bbd6-be0d2a72bd1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.233429] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e7206a-7edd-c84d-386b-6ed0a2df3413, 'name': SearchDatastore_Task, 'duration_secs': 0.010166} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.233735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.233984] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1724.234255] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.238539] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf6b352d-9cc7-46b4-8a0b-f6f488aff3ad tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.273297] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52babad6-4033-5dab-a353-61ea63c3e463, 'name': SearchDatastore_Task, 'duration_secs': 0.008859} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.273563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.273840] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c2baf40b-ea57-4552-8d56-45bcd49280ec/c2baf40b-ea57-4552-8d56-45bcd49280ec.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1724.274137] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.274327] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1724.274533] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc5b1629-b0a8-4e3a-9eb3-a8764bdfde5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.276754] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fb2a4b7-4c3d-4412-85b6-ac629be48182 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.283335] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1724.283335] env[62525]: value = "task-1781860" [ 1724.283335] env[62525]: _type = "Task" [ 1724.283335] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.287334] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1724.287508] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1724.288471] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b793418-74a9-4552-8433-6f243b946b91 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.294493] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.299120] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1724.299120] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52da0ca6-b51c-5090-36d5-194a20925c29" [ 1724.299120] env[62525]: _type = "Task" [ 1724.299120] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.305388] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ac25bd93-e6bc-4403-91ed-72530bd34152 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.243s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.306500] env[62525]: DEBUG oslo_vmware.api [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781858, 'name': PowerOffVM_Task, 'duration_secs': 0.189591} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.307260] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1724.307426] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1724.307669] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54d7c4d7-0703-4369-b946-bf46ac13eee8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.312278] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52da0ca6-b51c-5090-36d5-194a20925c29, 'name': SearchDatastore_Task, 'duration_secs': 0.008247} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.313332] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b505ab8-9ffb-4497-a5b6-53d8932d5222 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.319255] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1724.319255] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528a6292-f3cd-0351-c773-c364b834e645" [ 1724.319255] env[62525]: _type = "Task" [ 1724.319255] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.327968] env[62525]: INFO nova.compute.manager [-] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Took 1.38 seconds to deallocate network for instance. [ 1724.328475] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528a6292-f3cd-0351-c773-c364b834e645, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.388168] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1724.388384] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1724.388565] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleting the datastore file [datastore1] 0067de08-6708-4c7c-a83a-ed9df193d5cd {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1724.388848] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94785015-bfb4-4253-baa4-77da0a600880 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.396160] env[62525]: DEBUG oslo_vmware.api [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1724.396160] env[62525]: value = "task-1781862" [ 1724.396160] env[62525]: _type = "Task" [ 1724.396160] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.405022] env[62525]: DEBUG oslo_vmware.api [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781862, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.500864] env[62525]: DEBUG oslo_vmware.api [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781859, 'name': PowerOffVM_Task, 'duration_secs': 0.19471} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.500864] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1724.500864] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1724.501095] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca65f7e4-7015-413f-8d8e-ad4128cc5e8c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.613173] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1724.613173] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1724.613173] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleting the datastore file [datastore1] 2f713b35-9d07-4d25-a333-506fd2469bd5 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1724.613173] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77921128-785e-41b6-b94f-667bfe36fcf2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.624821] env[62525]: DEBUG oslo_vmware.api [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1724.624821] env[62525]: value = "task-1781864" [ 1724.624821] env[62525]: 
_type = "Task" [ 1724.624821] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.634604] env[62525]: DEBUG oslo_vmware.api [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.663352] env[62525]: INFO nova.compute.manager [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Took 13.65 seconds to build instance. [ 1724.669822] env[62525]: DEBUG oslo_concurrency.lockutils [req-65380752-800b-4262-974c-f3086437efa9 req-71046b30-c6f3-444a-9b78-6de862313184 service nova] Releasing lock "refresh_cache-130a3015-6caf-4374-a35f-9dd49bb8b3bf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.798434] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781860, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486371} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.798434] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c2baf40b-ea57-4552-8d56-45bcd49280ec/c2baf40b-ea57-4552-8d56-45bcd49280ec.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1724.798434] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1724.798434] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6de3a264-c592-4519-abb4-3afcc13e52b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.807026] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1724.807026] env[62525]: value = "task-1781865" [ 1724.807026] env[62525]: _type = "Task" [ 1724.807026] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.812378] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781865, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.830299] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528a6292-f3cd-0351-c773-c364b834e645, 'name': SearchDatastore_Task, 'duration_secs': 0.008866} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.830743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.831121] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 130a3015-6caf-4374-a35f-9dd49bb8b3bf/130a3015-6caf-4374-a35f-9dd49bb8b3bf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1724.831517] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d27a982e-78b9-4b70-9d80-c10ffabb76dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.834353] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.834717] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.835038] env[62525]: DEBUG nova.objects.instance [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'resources' on Instance uuid 3b1a825f-b6a5-4822-86a5-57972f34748c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1724.845335] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1724.845335] env[62525]: value = "task-1781866" [ 1724.845335] env[62525]: _type = "Task" [ 1724.845335] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.854637] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781866, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.908975] env[62525]: DEBUG oslo_vmware.api [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781862, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.437646} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.911026] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1724.911026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1724.911026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1724.911026] env[62525]: INFO nova.compute.manager [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1724.911026] env[62525]: DEBUG oslo.service.loopingcall [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1724.911026] env[62525]: DEBUG nova.compute.manager [-] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1724.911026] env[62525]: DEBUG nova.network.neutron [-] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1725.137280] env[62525]: DEBUG oslo_vmware.api [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271798} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.137280] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1725.137280] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1725.137280] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1725.137280] env[62525]: INFO nova.compute.manager [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1725.137280] env[62525]: DEBUG oslo.service.loopingcall [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.137280] env[62525]: DEBUG nova.compute.manager [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1725.137280] env[62525]: DEBUG nova.network.neutron [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1725.154389] env[62525]: INFO nova.compute.manager [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Rebuilding instance [ 1725.164904] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fb99973-0f87-40eb-a95a-f22d876e87a6 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.163s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.218359] env[62525]: DEBUG nova.compute.manager [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1725.219288] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc363b6-b0ab-4ad1-964c-675d3bd7ab98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.314833] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781865, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077087} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.314833] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1725.315390] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a7df56-6b3c-43c9-9479-a55fee43546d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.338931] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] c2baf40b-ea57-4552-8d56-45bcd49280ec/c2baf40b-ea57-4552-8d56-45bcd49280ec.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1725.342864] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdc8e599-27f5-451c-b24f-506ad8f4d7f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.359037] env[62525]: DEBUG nova.compute.manager [req-3bed6838-2fbd-4531-9b28-4c1be416f531 req-201683e1-4ecd-4c97-926e-c7729116c84e service nova] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Received event network-vif-deleted-3ddb902b-b001-40bc-b635-a885589b1573 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1725.369743] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781866, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509883} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.371037] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 130a3015-6caf-4374-a35f-9dd49bb8b3bf/130a3015-6caf-4374-a35f-9dd49bb8b3bf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1725.371263] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1725.371568] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1725.371568] env[62525]: value = "task-1781867" [ 1725.371568] env[62525]: _type = "Task" [ 1725.371568] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.371758] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c008a236-0609-406f-afbf-30042cebf5dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.384463] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781867, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.385839] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1725.385839] env[62525]: value = "task-1781868" [ 1725.385839] env[62525]: _type = "Task" [ 1725.385839] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.396514] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781868, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.654701] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e1d8b2-b008-449c-b00b-653f121b6fea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.665926] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae52164-6800-478f-bab5-adf66d81d8e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.701392] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10072d03-23e7-4580-8edd-416f4271f7f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.713911] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4f5d05-6685-4da1-8111-3f3dca6a662e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.728114] env[62525]: DEBUG nova.compute.provider_tree [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1725.738151] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1725.738151] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e33f81e1-a11d-4c0f-bb3b-43ac072134af 
{{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.745938] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1725.745938] env[62525]: value = "task-1781869" [ 1725.745938] env[62525]: _type = "Task" [ 1725.745938] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.761754] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781869, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.861189] env[62525]: DEBUG nova.compute.manager [req-fcc59e5a-eceb-4e73-a79c-299263626c54 req-0f4678df-3aa4-45df-8d68-60c711786b3e service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Received event network-vif-deleted-6d3644a6-dbaa-4a30-930a-53beadf8704a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1725.861189] env[62525]: INFO nova.compute.manager [req-fcc59e5a-eceb-4e73-a79c-299263626c54 req-0f4678df-3aa4-45df-8d68-60c711786b3e service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Neutron deleted interface 6d3644a6-dbaa-4a30-930a-53beadf8704a; detaching it from the instance and deleting it from the info cache [ 1725.861189] env[62525]: DEBUG nova.network.neutron [req-fcc59e5a-eceb-4e73-a79c-299263626c54 req-0f4678df-3aa4-45df-8d68-60c711786b3e service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.885809] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781867, 'name': ReconfigVM_Task, 'duration_secs': 0.510518} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.886300] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Reconfigured VM instance instance-0000005d to attach disk [datastore1] c2baf40b-ea57-4552-8d56-45bcd49280ec/c2baf40b-ea57-4552-8d56-45bcd49280ec.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1725.892029] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9084a9a8-6b80-431e-ab10-a3bf38888a26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.897853] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781868, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121235} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.899425] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1725.899990] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1725.899990] env[62525]: value = "task-1781870" [ 1725.899990] env[62525]: _type = "Task" [ 1725.899990] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.901190] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766e413b-9c2b-4327-ab98-441deedff0c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.930536] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 130a3015-6caf-4374-a35f-9dd49bb8b3bf/130a3015-6caf-4374-a35f-9dd49bb8b3bf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1725.938016] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f64418c-5952-4108-abe9-c2179afca620 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.954185] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781870, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.959819] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1725.959819] env[62525]: value = "task-1781871" [ 1725.959819] env[62525]: _type = "Task" [ 1725.959819] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.968831] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781871, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.232016] env[62525]: DEBUG nova.scheduler.client.report [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1726.259115] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781869, 'name': PowerOffVM_Task, 'duration_secs': 0.249378} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.259486] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1726.259816] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1726.260670] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8f4404-26fa-4997-83cc-f2754cdedc07 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.268303] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1726.268622] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63cec88c-eaf1-4762-aec4-63530cb4a1f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.301060] env[62525]: DEBUG nova.network.neutron [-] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.309323] env[62525]: DEBUG nova.network.neutron [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.363775] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ec19a35-06b2-4c6a-9442-c5ce3bbd9690 {{(pid=62525) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.374676] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9dd406-b23a-4658-a250-dfec95b08443 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.409035] env[62525]: DEBUG nova.compute.manager [req-fcc59e5a-eceb-4e73-a79c-299263626c54 req-0f4678df-3aa4-45df-8d68-60c711786b3e service nova] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Detach interface failed, port_id=6d3644a6-dbaa-4a30-930a-53beadf8704a, reason: Instance 2f713b35-9d07-4d25-a333-506fd2469bd5 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1726.413501] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1726.413713] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1726.413929] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleting the datastore file [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1726.414537] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8894ebea-361e-4eb7-be5e-7013bb72c00e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.419653] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781870, 'name': Rename_Task, 'duration_secs': 0.177768} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.420873] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1726.421165] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-908e7d7c-5ffe-4937-9df0-e92852afcbe9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.424334] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1726.424334] env[62525]: value = "task-1781873" [ 1726.424334] env[62525]: _type = "Task" [ 1726.424334] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.429554] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1726.429554] env[62525]: value = "task-1781874" [ 1726.429554] env[62525]: _type = "Task" [ 1726.429554] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.439695] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781873, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.444913] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781874, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.471897] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781871, 'name': ReconfigVM_Task, 'duration_secs': 0.337985} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.471897] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 130a3015-6caf-4374-a35f-9dd49bb8b3bf/130a3015-6caf-4374-a35f-9dd49bb8b3bf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1726.471897] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b355507f-a842-4aba-8f69-b32a08cd26f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.479114] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1726.479114] env[62525]: value = "task-1781875" [ 1726.479114] env[62525]: _type = "Task" [ 1726.479114] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.487913] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781875, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.643482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.643715] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.737807] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.755268] env[62525]: INFO nova.scheduler.client.report [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted allocations for instance 3b1a825f-b6a5-4822-86a5-57972f34748c [ 1726.803415] env[62525]: INFO nova.compute.manager [-] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Took 1.89 seconds to deallocate network for instance. 
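The "Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424" entry above carries the full inventory record the scheduler report client compares against Placement. As a quick cross-check of what those figures imply, the effective schedulable capacity of each resource class is (total - reserved) * allocation_ratio; the snippet below is plain arithmetic over the values from that entry, not Placement or Nova code.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        # effective capacity = (total - reserved) * allocation_ratio
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0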
[ 1726.812296] env[62525]: INFO nova.compute.manager [-] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Took 1.67 seconds to deallocate network for instance. [ 1726.937830] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214316} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.938572] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1726.938783] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1726.938988] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1726.945199] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781874, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.989864] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781875, 'name': Rename_Task, 'duration_secs': 0.173453} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.990183] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1726.990433] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-027faecc-ea07-46fa-9c0b-eca92307585e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.996921] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1726.996921] env[62525]: value = "task-1781876" [ 1726.996921] env[62525]: _type = "Task" [ 1726.996921] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.006111] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781876, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.146755] env[62525]: DEBUG nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1727.262837] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4fa150bb-5164-44c8-a4c8-10b0267fb090 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "3b1a825f-b6a5-4822-86a5-57972f34748c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.720s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.308924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.309224] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.309451] env[62525]: DEBUG nova.objects.instance [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lazy-loading 'resources' on Instance uuid 0067de08-6708-4c7c-a83a-ed9df193d5cd {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1727.318222] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.365876] env[62525]: DEBUG nova.compute.manager [req-b9cea169-e5c3-4d0c-a29b-59474bd291a9 req-a0a11707-b405-43ae-bb89-4a82e988111f service nova] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Received event network-vif-deleted-91b445d1-b4ed-4a07-b4ae-8d7c8e5755e7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1727.440039] env[62525]: DEBUG oslo_vmware.api [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781874, 'name': PowerOnVM_Task, 'duration_secs': 0.587815} completed 
successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.441281] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1727.441281] env[62525]: INFO nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Took 8.78 seconds to spawn the instance on the hypervisor. [ 1727.441281] env[62525]: DEBUG nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1727.441575] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43ff554-b444-43fc-b6b2-08b681b381f8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.508564] env[62525]: DEBUG oslo_vmware.api [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781876, 'name': PowerOnVM_Task, 'duration_secs': 0.473421} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.508840] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1727.509540] env[62525]: INFO nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Took 6.47 seconds to spawn the instance on the hypervisor. 
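The PowerOnVM_Task and CreateVM_Task entries above come from oslo.vmware's wait_for_task/_poll_task loop, which repeatedly reads a vCenter task's progress until it reports success or failure. The following is a minimal, self-contained sketch of that polling pattern in Python; the function name wait_for_task_result, the poll_fn contract, and the TaskFailed exception are illustrative assumptions for this sketch and are not oslo.vmware's actual API.

    import time

    class TaskFailed(Exception):
        # Raised when the remote task ends in an error state or times out (illustrative).
        pass

    def wait_for_task_result(poll_fn, interval=0.5, timeout=300.0):
        # poll_fn() is assumed to return a dict such as
        #   {'state': 'running', 'progress': 42}
        #   {'state': 'success', 'result': ...}
        #   {'state': 'error', 'message': '...'}
        # mirroring the "progress is 0%" / "completed successfully" lines in this log.
        deadline = time.monotonic() + timeout
        while True:
            info = poll_fn()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise TaskFailed(info.get('message', 'task failed'))
            if time.monotonic() >= deadline:
                raise TaskFailed('timed out waiting for task')
            time.sleep(interval)
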
[ 1727.509688] env[62525]: DEBUG nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1727.510578] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcaff3a9-6490-46af-b633-293829560fbd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.672722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.964882] env[62525]: INFO nova.compute.manager [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Took 16.12 seconds to build instance. [ 1727.991972] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1727.992233] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1727.992389] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1727.992567] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1727.992708] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1727.992855] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1727.993218] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1727.993276] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1727.993504] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1727.993724] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1727.993914] env[62525]: DEBUG nova.virt.hardware [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1727.994930] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b568a7-58b0-4b60-846e-f2ecfc86f8b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.007355] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482c4240-39b7-47a3-8271-17ca7e193553 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.027628] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:73:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '170fce2e-bab4-4fa8-b3b6-6067a35d8fbd', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1728.037243] env[62525]: DEBUG oslo.service.loopingcall [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 
tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.039496] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1728.039943] env[62525]: INFO nova.compute.manager [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Took 14.76 seconds to build instance. [ 1728.043148] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a72fb79-508b-419d-b538-a87de8af2375 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.066211] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1728.066211] env[62525]: value = "task-1781877" [ 1728.066211] env[62525]: _type = "Task" [ 1728.066211] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.078077] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781877, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.116458] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40554589-abc0-4b33-8702-f52f74923e5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.124918] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecc1f9e-3b5d-4fc1-a7c7-7e30a28bbfb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.165983] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aba3144-1579-4fd4-968c-02c077d6170e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.174815] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07439371-a49e-48cb-9464-92ecb5399f34 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.189480] env[62525]: DEBUG nova.compute.provider_tree [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.467399] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4f88779f-f078-4d0d-8e9c-bc5dbc56e250 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.632s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.557865] 
env[62525]: DEBUG oslo_concurrency.lockutils [None req-8870e336-7122-4808-a011-0c4b1c41604a tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.293s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.577428] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781877, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.692583] env[62525]: DEBUG nova.scheduler.client.report [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1729.077804] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781877, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.100632] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.100869] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.198553] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.889s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.201238] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.883s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.201496] env[62525]: DEBUG nova.objects.instance [None req-8f5114cc-0631-47af-9598-9b565cb013ff 
tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'resources' on Instance uuid 2f713b35-9d07-4d25-a333-506fd2469bd5 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1729.218129] env[62525]: INFO nova.scheduler.client.report [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleted allocations for instance 0067de08-6708-4c7c-a83a-ed9df193d5cd [ 1729.578917] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781877, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.605354] env[62525]: DEBUG nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1729.727898] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53b7358f-542e-4335-9050-b68f6a56f356 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "0067de08-6708-4c7c-a83a-ed9df193d5cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.957s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.944090] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f8f9bc-8e75-45b3-a8a3-adc21648cbca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.952038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cc3e51-c07e-4d1c-9a65-e3b4f8f419ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.960338] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.960602] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.989489] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de27821b-9a16-4240-a5f1-424b3ef3d05c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.998175] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53bbfe3-a7b5-4c3a-b1c8-2144780088c7 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.014185] env[62525]: DEBUG nova.compute.provider_tree [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1730.048216] env[62525]: DEBUG nova.compute.manager [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1730.079339] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781877, 'name': CreateVM_Task, 'duration_secs': 1.634125} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.079523] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1730.080244] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.080428] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.080763] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1730.081030] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-857dcb45-8780-43c0-b838-0118096c6f61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.085949] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1730.085949] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528b070c-6e2d-7783-7c5e-43e60a9724f7" [ 1730.085949] env[62525]: _type = "Task" [ 1730.085949] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.097713] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528b070c-6e2d-7783-7c5e-43e60a9724f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.125215] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.489103] env[62525]: DEBUG nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1730.517249] env[62525]: DEBUG nova.scheduler.client.report [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1730.567439] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.597057] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528b070c-6e2d-7783-7c5e-43e60a9724f7, 'name': SearchDatastore_Task, 'duration_secs': 0.013088} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.597384] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.597875] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1730.597875] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.598019] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.598187] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1730.598445] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7617c27a-c0aa-4dc0-ab83-12b5c3cbff48 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.607704] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1730.607916] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1730.608662] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47baca6f-3e3d-485b-89db-453e5413d8a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.613769] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1730.613769] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529773d7-e6d4-7b01-f1bf-30238db96ce6" [ 1730.613769] env[62525]: _type = "Task" [ 1730.613769] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.621605] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529773d7-e6d4-7b01-f1bf-30238db96ce6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.009039] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.022426] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.821s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.024574] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.352s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.026212] env[62525]: INFO nova.compute.claims [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1731.043121] env[62525]: INFO nova.scheduler.client.report [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted allocations for instance 2f713b35-9d07-4d25-a333-506fd2469bd5 [ 1731.125198] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529773d7-e6d4-7b01-f1bf-30238db96ce6, 'name': SearchDatastore_Task, 'duration_secs': 
0.012101} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.125962] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f449da-5138-414e-a2ea-33d872be7e38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.132057] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1731.132057] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e75c47-f591-9b6e-e8f3-3f866952adf7" [ 1731.132057] env[62525]: _type = "Task" [ 1731.132057] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.138884] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e75c47-f591-9b6e-e8f3-3f866952adf7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.550805] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8f5114cc-0631-47af-9598-9b565cb013ff tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "2f713b35-9d07-4d25-a333-506fd2469bd5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.584s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.641891] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e75c47-f591-9b6e-e8f3-3f866952adf7, 'name': SearchDatastore_Task, 'duration_secs': 0.025156} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.642367] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.642486] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1731.642755] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-571e17e5-bcf2-4573-9f9b-33c3510a4998 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.649134] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1731.649134] env[62525]: value = "task-1781879" [ 1731.649134] env[62525]: _type = "Task" [ 1731.649134] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.656905] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.158338] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781879, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459361} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.160707] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1732.160934] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1732.161370] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8be937bb-1075-4550-88d9-67bb2b442a24 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.167850] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1732.167850] env[62525]: value = "task-1781880" [ 1732.167850] env[62525]: _type = "Task" [ 1732.167850] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.179247] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781880, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.258062] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5c4d2f-f13e-4542-bc95-80b0e5cf3fb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.265441] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8590ff25-7bd3-4a26-b244-f9eead5a48c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.295844] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac103cb6-2f42-4f10-bee6-3a39fc898780 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.303361] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8ac235-cc38-4836-9fdc-7453e0c51e6f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.316450] env[62525]: DEBUG nova.compute.provider_tree [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1732.677987] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781880, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077682} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.678305] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1732.679064] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1784c096-cc31-48b9-afc5-fd475e34a39b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.704761] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1732.705278] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5791043-cef7-4eee-aa01-9235bbb0bff0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.726862] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1732.726862] env[62525]: value = "task-1781881" [ 1732.726862] env[62525]: _type = "Task" [ 1732.726862] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.734827] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781881, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.820345] env[62525]: DEBUG nova.scheduler.client.report [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1733.236879] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781881, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.324835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.325455] env[62525]: DEBUG nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1733.328052] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.203s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.330201] env[62525]: INFO nova.compute.claims [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1733.472103] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.472413] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.518985] env[62525]: DEBUG nova.compute.manager [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1733.736891] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781881, 'name': ReconfigVM_Task, 'duration_secs': 0.9574} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.737276] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Reconfigured VM instance instance-0000005c to attach disk [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875/ad4e94cc-d59c-4876-bf66-ec084350f875.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1733.737894] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8df6a5af-14a6-4fc1-aa25-772be540f49c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.744085] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1733.744085] env[62525]: value = "task-1781882" [ 1733.744085] env[62525]: _type = "Task" [ 1733.744085] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.751576] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781882, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.834424] env[62525]: DEBUG nova.compute.utils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1733.838062] env[62525]: DEBUG nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1733.838062] env[62525]: DEBUG nova.network.neutron [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1733.893989] env[62525]: DEBUG nova.policy [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0eb16caec01e491a9369f27194a2836a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45e20a581c76424a8f8c2c844f1e04f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1733.975647] env[62525]: DEBUG nova.compute.utils [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1734.091368] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.137734] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "2f589dc1-9244-475f-86d0-4b69b511508b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.138072] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "2f589dc1-9244-475f-86d0-4b69b511508b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.138317] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "2f589dc1-9244-475f-86d0-4b69b511508b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.138511] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock 
"2f589dc1-9244-475f-86d0-4b69b511508b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.138681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "2f589dc1-9244-475f-86d0-4b69b511508b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.142419] env[62525]: INFO nova.compute.manager [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Terminating instance [ 1734.143615] env[62525]: DEBUG nova.compute.manager [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1734.143615] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1734.144695] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c90ec7e-76e2-42c8-ba9c-cc1c6c489a8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.154685] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1734.154935] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad1fdd7a-6754-429f-9b78-37f45729ed23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.162448] env[62525]: DEBUG oslo_vmware.api [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1734.162448] env[62525]: value = "task-1781883" [ 1734.162448] env[62525]: _type = "Task" [ 1734.162448] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.171071] env[62525]: DEBUG oslo_vmware.api [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781883, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.179474] env[62525]: DEBUG nova.network.neutron [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Successfully created port: bdc798dc-53dc-400d-aff6-c49ee2c1f4fb {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1734.255077] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781882, 'name': Rename_Task, 'duration_secs': 0.156095} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.255354] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1734.255594] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ead63148-48e0-44a3-b0e5-91739072624e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.261800] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1734.261800] env[62525]: value = "task-1781884" [ 1734.261800] env[62525]: _type = "Task" [ 1734.261800] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.269555] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.344291] env[62525]: DEBUG nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1734.478759] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.584802] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc8e72a-42b0-4ac4-839a-bfc2f522be94 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.592432] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04137d94-afaf-4c21-b6b2-ed4b5eb249d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.623096] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f14a68-3404-45dc-b8aa-f159c0a0afe1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.630908] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519d813c-4872-4722-b147-e2d283025c05 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.644183] env[62525]: DEBUG nova.compute.provider_tree [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1734.673407] env[62525]: DEBUG oslo_vmware.api [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781883, 'name': PowerOffVM_Task, 'duration_secs': 0.275694} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.673794] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1734.673874] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1734.674084] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ed22600-5aa4-4752-a6ef-44d47c510517 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.772556] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781884, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.804044] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1734.804044] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1734.804044] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleting the datastore file [datastore1] 2f589dc1-9244-475f-86d0-4b69b511508b {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1734.804321] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f7bbc17-fff2-49e5-bf4a-0ac69ee0d52a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.817027] env[62525]: DEBUG oslo_vmware.api [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1734.817027] env[62525]: value = "task-1781886" [ 1734.817027] env[62525]: _type = "Task" [ 1734.817027] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.824715] env[62525]: DEBUG oslo_vmware.api [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781886, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.147752] env[62525]: DEBUG nova.scheduler.client.report [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1735.275738] env[62525]: DEBUG oslo_vmware.api [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781884, 'name': PowerOnVM_Task, 'duration_secs': 0.540081} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.276043] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1735.276272] env[62525]: DEBUG nova.compute.manager [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1735.277054] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3d91d1-c49c-4f5a-bd6b-a4177ea13ef8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.326362] env[62525]: DEBUG oslo_vmware.api [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781886, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30702} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.326598] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1735.326810] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1735.327050] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1735.327248] env[62525]: INFO nova.compute.manager [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1735.327551] env[62525]: DEBUG oslo.service.loopingcall [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1735.327743] env[62525]: DEBUG nova.compute.manager [-] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1735.327837] env[62525]: DEBUG nova.network.neutron [-] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1735.353786] env[62525]: DEBUG nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1735.380786] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T00:16:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ebf24d7-e7f0-4555-bbf8-7b4230bb9b33',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2073680030',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1735.381050] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1735.381211] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1735.381392] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1735.381537] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1735.381710] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1735.382016] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1735.382119] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1735.382253] env[62525]: 
DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1735.382412] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1735.382581] env[62525]: DEBUG nova.virt.hardware [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1735.383463] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b2aed4-2a48-4fba-9ad5-9819257ce5b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.397602] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f91979-3ba8-4943-adee-3022bb98a325 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.547095] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.547444] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.547663] env[62525]: INFO nova.compute.manager [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Attaching volume bab92fbe-a9ca-438a-83f9-e0041c650d07 to /dev/sdb [ 1735.587697] env[62525]: DEBUG nova.compute.manager [req-f1ab4c23-e7e5-4857-81d3-70a55ea93851 req-cbbe97b1-5cf5-4eec-816b-a3ffc91f7795 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Received event network-vif-plugged-bdc798dc-53dc-400d-aff6-c49ee2c1f4fb {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1735.587697] env[62525]: DEBUG oslo_concurrency.lockutils [req-f1ab4c23-e7e5-4857-81d3-70a55ea93851 req-cbbe97b1-5cf5-4eec-816b-a3ffc91f7795 service nova] Acquiring lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.588964] env[62525]: DEBUG oslo_concurrency.lockutils 
[req-f1ab4c23-e7e5-4857-81d3-70a55ea93851 req-cbbe97b1-5cf5-4eec-816b-a3ffc91f7795 service nova] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.588964] env[62525]: DEBUG oslo_concurrency.lockutils [req-f1ab4c23-e7e5-4857-81d3-70a55ea93851 req-cbbe97b1-5cf5-4eec-816b-a3ffc91f7795 service nova] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.588964] env[62525]: DEBUG nova.compute.manager [req-f1ab4c23-e7e5-4857-81d3-70a55ea93851 req-cbbe97b1-5cf5-4eec-816b-a3ffc91f7795 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] No waiting events found dispatching network-vif-plugged-bdc798dc-53dc-400d-aff6-c49ee2c1f4fb {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1735.588964] env[62525]: WARNING nova.compute.manager [req-f1ab4c23-e7e5-4857-81d3-70a55ea93851 req-cbbe97b1-5cf5-4eec-816b-a3ffc91f7795 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Received unexpected event network-vif-plugged-bdc798dc-53dc-400d-aff6-c49ee2c1f4fb for instance with vm_state building and task_state spawning. [ 1735.593644] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443d6858-e688-4670-89c7-9f561c07fa2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.603914] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5fa509-6c00-4960-a369-ace148d4901e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.613333] env[62525]: DEBUG nova.compute.manager [req-65ae7955-4e28-4421-ad8e-3bfbd792052f req-6f150c96-2cf3-4464-acb7-e29e40b2739e service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Received event network-vif-deleted-988d8ed1-10c8-470a-81b1-0d6c0839c35b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1735.613439] env[62525]: INFO nova.compute.manager [req-65ae7955-4e28-4421-ad8e-3bfbd792052f req-6f150c96-2cf3-4464-acb7-e29e40b2739e service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Neutron deleted interface 988d8ed1-10c8-470a-81b1-0d6c0839c35b; detaching it from the instance and deleting it from the info cache [ 1735.613591] env[62525]: DEBUG nova.network.neutron [req-65ae7955-4e28-4421-ad8e-3bfbd792052f req-6f150c96-2cf3-4464-acb7-e29e40b2739e service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.620170] env[62525]: DEBUG nova.virt.block_device [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updating existing volume attachment record: cac298bf-181b-4d2e-961c-73b32f92af68 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1735.652427] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.652950] env[62525]: DEBUG nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1735.655460] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.088s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.775659] env[62525]: DEBUG nova.network.neutron [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Successfully updated port: bdc798dc-53dc-400d-aff6-c49ee2c1f4fb {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1735.796720] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.097549] env[62525]: DEBUG nova.network.neutron [-] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.116498] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd7bb534-692c-4ae6-8bdc-922443edbf78 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.125783] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091e52ad-b155-4cd8-8eb4-0009c0c134b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.161970] env[62525]: DEBUG nova.compute.utils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1736.165733] env[62525]: INFO nova.compute.claims [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1736.169321] env[62525]: DEBUG nova.compute.manager [req-65ae7955-4e28-4421-ad8e-3bfbd792052f req-6f150c96-2cf3-4464-acb7-e29e40b2739e service nova] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Detach 
interface failed, port_id=988d8ed1-10c8-470a-81b1-0d6c0839c35b, reason: Instance 2f589dc1-9244-475f-86d0-4b69b511508b could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1736.170168] env[62525]: DEBUG nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1736.170339] env[62525]: DEBUG nova.network.neutron [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1736.251739] env[62525]: DEBUG nova.policy [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59a07073c7534d17bfb2013552bbe0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c56f465d1a641a99458904c04137621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1736.281751] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.282810] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.282810] env[62525]: DEBUG nova.network.neutron [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1736.603027] env[62525]: INFO nova.compute.manager [-] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Took 1.27 seconds to deallocate network for instance. 
[ 1736.611802] env[62525]: DEBUG nova.network.neutron [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Successfully created port: 971b9aa1-023e-45dd-b9dd-ddc0c852ee18 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1736.672050] env[62525]: INFO nova.compute.resource_tracker [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating resource usage from migration f4cfc1b6-3b0c-4bf2-a28a-d34d7e9c9a52 [ 1736.677938] env[62525]: DEBUG nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1736.815610] env[62525]: DEBUG nova.network.neutron [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1736.962660] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e271d5-de9e-42ce-aa59-2ac1ded70038 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.965691] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "ad4e94cc-d59c-4876-bf66-ec084350f875" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.965924] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.966141] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "ad4e94cc-d59c-4876-bf66-ec084350f875-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.966403] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.966590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.969243] env[62525]: INFO nova.compute.manager [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Terminating instance [ 1736.970955] env[62525]: DEBUG nova.compute.manager [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1736.971168] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1736.971943] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395ec7e2-8d74-44af-9052-418fc6734832 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.977383] env[62525]: DEBUG nova.network.neutron [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with network_info: [{"id": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "address": "fa:16:3e:d3:d5:d4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdc798dc-53", "ovs_interfaceid": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.979703] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4232ec-c5f1-437e-a220-e6421a2f993d {{(pid=62525) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.985200] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1736.985758] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af9d772c-ba5c-4a31-95ea-43dc39960831 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.014474] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57ef655-098e-4238-85ca-113679c211f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.017643] env[62525]: DEBUG oslo_vmware.api [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1737.017643] env[62525]: value = "task-1781890" [ 1737.017643] env[62525]: _type = "Task" [ 1737.017643] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.025079] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01052467-1532-4e03-8597-fd5474e26ace {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.033740] env[62525]: DEBUG oslo_vmware.api [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781890, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.044146] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-80fbfbda-07fb-43ab-be74-3cbdaf890a55-6c93e506-f746-4d2e-922a-f389df5494a8" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.044401] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-80fbfbda-07fb-43ab-be74-3cbdaf890a55-6c93e506-f746-4d2e-922a-f389df5494a8" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.044746] env[62525]: DEBUG nova.objects.instance [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'flavor' on Instance uuid 80fbfbda-07fb-43ab-be74-3cbdaf890a55 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1737.046460] env[62525]: DEBUG nova.compute.provider_tree [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1737.107135] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.486280] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.486671] env[62525]: DEBUG nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Instance network_info: |[{"id": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "address": "fa:16:3e:d3:d5:d4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdc798dc-53", "ovs_interfaceid": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1737.487235] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:d5:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bdc798dc-53dc-400d-aff6-c49ee2c1f4fb', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1737.499693] env[62525]: DEBUG oslo.service.loopingcall [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1737.499935] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1737.502085] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce0d5fad-c06c-4d79-8fdb-4f066b34bbbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.532392] env[62525]: DEBUG oslo_vmware.api [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781890, 'name': PowerOffVM_Task, 'duration_secs': 0.320129} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.533762] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1737.534158] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1737.534410] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1737.534410] env[62525]: value = "task-1781891" [ 1737.534410] env[62525]: _type = "Task" [ 1737.534410] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.534591] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db4075db-c431-4606-8d52-583aa8195656 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.543907] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781891, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.551722] env[62525]: DEBUG nova.scheduler.client.report [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1737.578320] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "0f401a95-7b62-4940-a819-d0d69fc4a59a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.578558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.668169] env[62525]: DEBUG nova.objects.instance [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'pci_requests' on Instance uuid 80fbfbda-07fb-43ab-be74-3cbdaf890a55 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1737.685358] env[62525]: DEBUG nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1737.717209] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1737.717209] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1737.717209] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleting the datastore file [datastore1] ad4e94cc-d59c-4876-bf66-ec084350f875 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1737.718326] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1737.718542] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1737.718692] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1737.718864] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1737.719016] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1737.719165] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 
tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1737.719413] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1737.719595] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1737.719761] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1737.719916] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1737.720095] env[62525]: DEBUG nova.virt.hardware [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1737.720361] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-188bf8a1-91c3-4800-b22f-9b7d0f953ac0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.722849] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c23308c-4836-48d6-9e75-5fdf91e693cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.732475] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67454416-5d9d-4fe5-9801-1d792c08f765 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.736237] env[62525]: DEBUG oslo_vmware.api [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1737.736237] env[62525]: value = "task-1781893" [ 1737.736237] env[62525]: _type = "Task" [ 1737.736237] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.753545] env[62525]: DEBUG oslo_vmware.api [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781893, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.863475] env[62525]: DEBUG nova.compute.manager [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Received event network-changed-bdc798dc-53dc-400d-aff6-c49ee2c1f4fb {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1737.863901] env[62525]: DEBUG nova.compute.manager [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Refreshing instance network info cache due to event network-changed-bdc798dc-53dc-400d-aff6-c49ee2c1f4fb. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1737.863958] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] Acquiring lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.864103] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] Acquired lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.864268] env[62525]: DEBUG nova.network.neutron [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Refreshing network info cache for port bdc798dc-53dc-400d-aff6-c49ee2c1f4fb {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1738.046261] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781891, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.058336] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.403s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.058529] env[62525]: INFO nova.compute.manager [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Migrating [ 1738.064896] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.056s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.066425] env[62525]: INFO nova.compute.claims [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1738.081066] env[62525]: DEBUG nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1738.170450] env[62525]: DEBUG nova.objects.base [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Object Instance<80fbfbda-07fb-43ab-be74-3cbdaf890a55> lazy-loaded attributes: flavor,pci_requests {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1738.170674] env[62525]: DEBUG nova.network.neutron [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1738.201933] env[62525]: DEBUG nova.compute.manager [req-3e817009-8642-4aea-b31c-a988b1847e9d req-7ed614ad-75e5-44ea-8ffd-ad6a0c12d47d service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Received event network-vif-plugged-971b9aa1-023e-45dd-b9dd-ddc0c852ee18 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1738.202192] env[62525]: DEBUG oslo_concurrency.lockutils [req-3e817009-8642-4aea-b31c-a988b1847e9d req-7ed614ad-75e5-44ea-8ffd-ad6a0c12d47d service nova] Acquiring lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.202405] env[62525]: DEBUG oslo_concurrency.lockutils [req-3e817009-8642-4aea-b31c-a988b1847e9d req-7ed614ad-75e5-44ea-8ffd-ad6a0c12d47d service nova] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.202516] env[62525]: DEBUG oslo_concurrency.lockutils [req-3e817009-8642-4aea-b31c-a988b1847e9d req-7ed614ad-75e5-44ea-8ffd-ad6a0c12d47d service nova] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.202681] env[62525]: DEBUG nova.compute.manager [req-3e817009-8642-4aea-b31c-a988b1847e9d req-7ed614ad-75e5-44ea-8ffd-ad6a0c12d47d service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] No waiting events found dispatching network-vif-plugged-971b9aa1-023e-45dd-b9dd-ddc0c852ee18 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1738.202838] env[62525]: WARNING nova.compute.manager [req-3e817009-8642-4aea-b31c-a988b1847e9d req-7ed614ad-75e5-44ea-8ffd-ad6a0c12d47d service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Received unexpected event network-vif-plugged-971b9aa1-023e-45dd-b9dd-ddc0c852ee18 for instance with vm_state building and task_state spawning. 
[ 1738.243717] env[62525]: DEBUG nova.policy [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd06d3779ee214d11b15b03546905a3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12f7b101b8848f28f2fc65ce3f0076c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1738.248939] env[62525]: DEBUG oslo_vmware.api [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168979} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.249236] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1738.249461] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1738.249674] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1738.249899] env[62525]: INFO nova.compute.manager [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1738.250185] env[62525]: DEBUG oslo.service.loopingcall [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.250412] env[62525]: DEBUG nova.compute.manager [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1738.250538] env[62525]: DEBUG nova.network.neutron [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1738.546429] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781891, 'name': CreateVM_Task, 'duration_secs': 0.515432} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.546624] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1738.547357] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.547543] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.547862] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1738.548133] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-086765b8-84fe-497c-aff7-4b50e54bab83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.552859] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1738.552859] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5268e678-d2fb-6deb-6f36-9da81093406a" [ 1738.552859] env[62525]: _type = "Task" [ 1738.552859] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.560807] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5268e678-d2fb-6deb-6f36-9da81093406a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.575517] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.575721] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.575913] env[62525]: DEBUG nova.network.neutron [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1738.607775] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.700054] env[62525]: DEBUG nova.network.neutron [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Successfully updated port: 971b9aa1-023e-45dd-b9dd-ddc0c852ee18 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1739.006175] env[62525]: DEBUG nova.compute.manager [req-a8fe980d-0d0a-4385-bb40-ec7d56ab01c3 req-8399ae8d-4cdb-4e6f-bba1-57d49c24daae service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Received event network-vif-deleted-170fce2e-bab4-4fa8-b3b6-6067a35d8fbd {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1739.006175] env[62525]: INFO nova.compute.manager [req-a8fe980d-0d0a-4385-bb40-ec7d56ab01c3 req-8399ae8d-4cdb-4e6f-bba1-57d49c24daae service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Neutron deleted interface 170fce2e-bab4-4fa8-b3b6-6067a35d8fbd; detaching it from the instance and deleting it from the info cache [ 1739.006175] env[62525]: DEBUG nova.network.neutron [req-a8fe980d-0d0a-4385-bb40-ec7d56ab01c3 req-8399ae8d-4cdb-4e6f-bba1-57d49c24daae service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.007544] env[62525]: DEBUG nova.network.neutron [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updated VIF entry in instance network info cache for port bdc798dc-53dc-400d-aff6-c49ee2c1f4fb. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1739.008289] env[62525]: DEBUG nova.network.neutron [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with network_info: [{"id": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "address": "fa:16:3e:d3:d5:d4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdc798dc-53", "ovs_interfaceid": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.063607] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5268e678-d2fb-6deb-6f36-9da81093406a, 'name': SearchDatastore_Task, 'duration_secs': 0.008959} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.063745] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.064026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1739.064303] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.064492] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.064710] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1739.064993] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f901d93-d9f5-4921-999a-3a24e36c11ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.073081] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.073304] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1739.074024] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cf7af1a-2aef-423e-a109-facb5bb3c4a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.078865] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1739.078865] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d25d31-9c09-1059-f63b-5a17e1330fab" [ 1739.078865] env[62525]: _type = "Task" [ 1739.078865] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.091247] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d25d31-9c09-1059-f63b-5a17e1330fab, 'name': SearchDatastore_Task, 'duration_secs': 0.008093} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.092046] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c59423e-79aa-489e-8a2a-58ddfcf91371 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.097348] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1739.097348] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52abc05b-2e22-646f-7bb7-247b63d10d06" [ 1739.097348] env[62525]: _type = "Task" [ 1739.097348] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.104642] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52abc05b-2e22-646f-7bb7-247b63d10d06, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.203771] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "refresh_cache-b0d6acae-8da3-4ed9-8832-b1e88338ed27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.203771] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "refresh_cache-b0d6acae-8da3-4ed9-8832-b1e88338ed27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.203771] env[62525]: DEBUG nova.network.neutron [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1739.286785] env[62525]: DEBUG nova.network.neutron [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance_info_cache with network_info: [{"id": "78deceee-4409-4d65-b4f0-dfc4e932c381", "address": "fa:16:3e:4f:e8:3d", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78deceee-44", "ovs_interfaceid": "78deceee-4409-4d65-b4f0-dfc4e932c381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.327819] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1f4d5a-e934-4b01-90d1-d1ff6602fb78 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.336793] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7da5b80-34db-4b1c-abfc-8bbbdeb86ab8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.366368] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1bf29b-675e-4290-b4ab-1426567c78b1 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.373917] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2f41f4-3578-48d1-870c-9de50489d674 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.388726] env[62525]: DEBUG nova.compute.provider_tree [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1739.479784] env[62525]: DEBUG nova.network.neutron [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.512047] env[62525]: DEBUG oslo_concurrency.lockutils [req-e8e97c10-af05-408c-baea-3635872dbb43 req-7cb3ccfb-df72-4ccd-a053-ee6de7e36c13 service nova] Releasing lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.512262] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05155336-4597-41a1-8ec4-2f0c9885eb0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.522039] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3b270a-5cd9-46c1-a919-ecd8dc38f4e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.555328] env[62525]: DEBUG nova.compute.manager [req-a8fe980d-0d0a-4385-bb40-ec7d56ab01c3 req-8399ae8d-4cdb-4e6f-bba1-57d49c24daae service nova] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Detach interface failed, port_id=170fce2e-bab4-4fa8-b3b6-6067a35d8fbd, reason: Instance ad4e94cc-d59c-4876-bf66-ec084350f875 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1739.608455] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52abc05b-2e22-646f-7bb7-247b63d10d06, 'name': SearchDatastore_Task, 'duration_secs': 0.008692} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.608662] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.608898] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1739.609168] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41db1584-66c5-466b-bcd8-d04b59773824 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.615625] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1739.615625] env[62525]: value = "task-1781895" [ 1739.615625] env[62525]: _type = "Task" [ 1739.615625] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.622968] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781895, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.734812] env[62525]: DEBUG nova.network.neutron [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1739.789344] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.873629] env[62525]: DEBUG nova.network.neutron [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Updating instance_info_cache with network_info: [{"id": "971b9aa1-023e-45dd-b9dd-ddc0c852ee18", "address": "fa:16:3e:19:81:f7", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971b9aa1-02", "ovs_interfaceid": "971b9aa1-023e-45dd-b9dd-ddc0c852ee18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.888440] env[62525]: DEBUG nova.compute.manager [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Received event network-changed-971b9aa1-023e-45dd-b9dd-ddc0c852ee18 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1739.888641] env[62525]: DEBUG nova.compute.manager [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Refreshing instance network info cache due to event network-changed-971b9aa1-023e-45dd-b9dd-ddc0c852ee18. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1739.888826] env[62525]: DEBUG oslo_concurrency.lockutils [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] Acquiring lock "refresh_cache-b0d6acae-8da3-4ed9-8832-b1e88338ed27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.891326] env[62525]: DEBUG nova.scheduler.client.report [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1739.982593] env[62525]: INFO nova.compute.manager [-] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Took 1.73 seconds to deallocate network for instance. [ 1740.125815] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781895, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494375} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.126256] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1740.126496] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1740.126795] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f22f7332-1ec2-482c-a372-f21bfab415b5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.133419] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1740.133419] env[62525]: value = "task-1781896" [ 1740.133419] env[62525]: _type = "Task" [ 1740.133419] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.143493] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781896, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.282902] env[62525]: DEBUG nova.network.neutron [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Successfully updated port: 6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1740.376063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "refresh_cache-b0d6acae-8da3-4ed9-8832-b1e88338ed27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.376223] env[62525]: DEBUG nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Instance network_info: |[{"id": "971b9aa1-023e-45dd-b9dd-ddc0c852ee18", "address": "fa:16:3e:19:81:f7", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971b9aa1-02", "ovs_interfaceid": "971b9aa1-023e-45dd-b9dd-ddc0c852ee18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1740.376477] env[62525]: DEBUG oslo_concurrency.lockutils [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] Acquired lock "refresh_cache-b0d6acae-8da3-4ed9-8832-b1e88338ed27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.376660] env[62525]: DEBUG nova.network.neutron [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Refreshing network info cache for port 971b9aa1-023e-45dd-b9dd-ddc0c852ee18 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1740.377883] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43d91715-0d4d-499f-ba65-df9e6122c142 
tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:81:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '971b9aa1-023e-45dd-b9dd-ddc0c852ee18', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1740.385662] env[62525]: DEBUG oslo.service.loopingcall [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1740.388691] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1740.389160] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ea17611-296a-4485-80f8-3d110a8c1793 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.404207] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.404694] env[62525]: DEBUG nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1740.407246] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.316s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.414370] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1740.414370] env[62525]: value = "task-1781897" [ 1740.414370] env[62525]: _type = "Task" [ 1740.414370] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.423622] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781897, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.488884] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.603452] env[62525]: DEBUG nova.network.neutron [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Updated VIF entry in instance network info cache for port 971b9aa1-023e-45dd-b9dd-ddc0c852ee18. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1740.603834] env[62525]: DEBUG nova.network.neutron [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Updating instance_info_cache with network_info: [{"id": "971b9aa1-023e-45dd-b9dd-ddc0c852ee18", "address": "fa:16:3e:19:81:f7", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap971b9aa1-02", "ovs_interfaceid": "971b9aa1-023e-45dd-b9dd-ddc0c852ee18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.643128] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781896, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064353} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.643450] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1740.644307] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5961d11-157b-46d8-83b0-d84950026c5a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.667965] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1740.669189] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Volume attach. Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1740.669421] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369819', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'name': 'volume-bab92fbe-a9ca-438a-83f9-e0041c650d07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'serial': 'bab92fbe-a9ca-438a-83f9-e0041c650d07'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1740.669822] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3cf726b-502b-4a92-9fbc-4edd6c4aa182 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.684785] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dece7302-5e40-4672-b05c-15989cd977b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.706079] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af243e81-a837-4dea-85cc-629163efd9ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.707217] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1740.707217] env[62525]: value = 
"task-1781898" [ 1740.707217] env[62525]: _type = "Task" [ 1740.707217] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.732541] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] volume-bab92fbe-a9ca-438a-83f9-e0041c650d07/volume-bab92fbe-a9ca-438a-83f9-e0041c650d07.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1740.733359] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c2b74dc-9d24-4d48-88dd-65f331fa9b89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.750216] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.755867] env[62525]: DEBUG oslo_vmware.api [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1740.755867] env[62525]: value = "task-1781899" [ 1740.755867] env[62525]: _type = "Task" [ 1740.755867] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.764216] env[62525]: DEBUG oslo_vmware.api [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781899, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.785115] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.785307] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.785523] env[62525]: DEBUG nova.network.neutron [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1740.912129] env[62525]: INFO nova.compute.claims [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1740.916548] env[62525]: DEBUG nova.compute.utils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1740.918094] env[62525]: DEBUG nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1740.918286] env[62525]: DEBUG nova.network.neutron [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1740.929418] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781897, 'name': CreateVM_Task, 'duration_secs': 0.434283} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.930182] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1740.930849] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.931019] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.931354] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1740.931830] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb87e4de-26b4-428d-adfc-83cd176b8b29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.936716] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1740.936716] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526c68e3-0319-35c6-1858-497dc2ecc7a0" [ 1740.936716] env[62525]: _type = "Task" [ 1740.936716] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.945675] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526c68e3-0319-35c6-1858-497dc2ecc7a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.962188] env[62525]: DEBUG nova.policy [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83bf79d024f345a9a8c02004f8cefbaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eab7fca262814290a975bf85badc9b71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1741.039588] env[62525]: DEBUG nova.compute.manager [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-vif-plugged-6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1741.040375] env[62525]: DEBUG oslo_concurrency.lockutils [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.040375] env[62525]: DEBUG oslo_concurrency.lockutils [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.040375] env[62525]: DEBUG oslo_concurrency.lockutils [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.040541] env[62525]: DEBUG nova.compute.manager [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] No waiting events found dispatching network-vif-plugged-6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1741.040743] env[62525]: WARNING nova.compute.manager [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received unexpected event network-vif-plugged-6c93e506-f746-4d2e-922a-f389df5494a8 for instance with vm_state active and task_state None. 
[ 1741.040948] env[62525]: DEBUG nova.compute.manager [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-changed-6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1741.041163] env[62525]: DEBUG nova.compute.manager [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing instance network info cache due to event network-changed-6c93e506-f746-4d2e-922a-f389df5494a8. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1741.041393] env[62525]: DEBUG oslo_concurrency.lockutils [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.053999] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.054297] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.106258] env[62525]: DEBUG oslo_concurrency.lockutils [req-9c90b065-6100-42ee-af91-f520decb563a req-fd4b01b2-c123-4054-8e05-3dea5b075a43 service nova] Releasing lock "refresh_cache-b0d6acae-8da3-4ed9-8832-b1e88338ed27" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.218489] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.240146] env[62525]: DEBUG nova.network.neutron [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Successfully created port: d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1741.265756] env[62525]: DEBUG oslo_vmware.api [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781899, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.302752] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0587d1eb-b064-4024-9b4e-01c3dc6732e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.321474] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance 'c2baf40b-ea57-4552-8d56-45bcd49280ec' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1741.341023] env[62525]: WARNING nova.network.neutron [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] 58fc2de9-73a3-4f13-914c-ad34af02ccb5 already exists in list: networks containing: ['58fc2de9-73a3-4f13-914c-ad34af02ccb5']. ignoring it [ 1741.420441] env[62525]: INFO nova.compute.resource_tracker [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating resource usage from migration d5a5cb92-367a-4848-a0d1-056710d84bb4 [ 1741.423581] env[62525]: DEBUG nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1741.459202] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526c68e3-0319-35c6-1858-497dc2ecc7a0, 'name': SearchDatastore_Task, 'duration_secs': 0.008874} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.459202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.459202] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1741.459202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.459202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.459202] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1741.459202] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e32d1618-a973-4c59-a560-a2469249a8d5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.469692] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1741.470097] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1741.473469] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-969fb7e0-86e7-472c-8329-6d54d4f3d08b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.485021] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1741.485021] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528a2975-abe4-f83d-358d-87c6c88bf9c8" [ 1741.485021] env[62525]: _type = "Task" [ 1741.485021] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.498033] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528a2975-abe4-f83d-358d-87c6c88bf9c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009514} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.498033] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-154defa5-0552-410f-8379-0c997d346b71 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.503970] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1741.503970] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529009c5-709d-70fc-cbda-f708dcef1286" [ 1741.503970] env[62525]: _type = "Task" [ 1741.503970] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.516734] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529009c5-709d-70fc-cbda-f708dcef1286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.561123] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1741.562238] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1741.718862] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781898, 'name': ReconfigVM_Task, 'duration_secs': 0.649402} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.720031] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1741.721185] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44aa1639-7d8b-43fa-ade5-a46a4620ba22 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.726045] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6810133-95cd-4bce-aeae-da9a0aeddb7e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.733943] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4d15ea-8bb0-4c93-86e2-95e9b2b6ffa1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.736371] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1741.736371] env[62525]: value = "task-1781900" [ 1741.736371] env[62525]: _type = "Task" [ 1741.736371] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.764793] env[62525]: DEBUG nova.network.neutron [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6c93e506-f746-4d2e-922a-f389df5494a8", "address": "fa:16:3e:8f:6e:f1", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c93e506-f7", "ovs_interfaceid": "6c93e506-f746-4d2e-922a-f389df5494a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.770298] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aaac50e-5ead-4283-93b6-27c32783bb3c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.778673] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781900, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.783814] env[62525]: DEBUG oslo_vmware.api [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781899, 'name': ReconfigVM_Task, 'duration_secs': 0.798244} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.785953] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfigured VM instance instance-00000057 to attach disk [datastore1] volume-bab92fbe-a9ca-438a-83f9-e0041c650d07/volume-bab92fbe-a9ca-438a-83f9-e0041c650d07.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1741.791697] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-630cc023-e44d-4935-bc4b-09a030fb6f4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.805089] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d523566-ec1b-41c1-943b-9aadb68e388d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.817823] env[62525]: DEBUG nova.compute.provider_tree [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1741.820852] env[62525]: DEBUG oslo_vmware.api [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1741.820852] env[62525]: value = "task-1781901" [ 1741.820852] env[62525]: _type = "Task" [ 1741.820852] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.827415] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1741.830694] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b94e7a3-5ec6-4960-90fb-048884279151 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.832305] env[62525]: DEBUG oslo_vmware.api [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781901, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.837452] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1741.837452] env[62525]: value = "task-1781902" [ 1741.837452] env[62525]: _type = "Task" [ 1741.837452] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.845900] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.018560] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529009c5-709d-70fc-cbda-f708dcef1286, 'name': SearchDatastore_Task, 'duration_secs': 0.027315} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.018869] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.019286] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] b0d6acae-8da3-4ed9-8832-b1e88338ed27/b0d6acae-8da3-4ed9-8832-b1e88338ed27.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1742.019593] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aabc60ca-6076-4f01-8fe6-10d9a197ebc9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.027154] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1742.027154] env[62525]: value = "task-1781903" [ 1742.027154] env[62525]: _type = "Task" [ 1742.027154] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.040425] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781903, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.064375] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.064521] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.064645] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1742.246271] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781900, 'name': Rename_Task, 'duration_secs': 0.159289} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.246600] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1742.246764] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a723760-7a18-476a-b33d-023b2f1b3dcc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.252977] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1742.252977] env[62525]: value = "task-1781904" [ 1742.252977] env[62525]: _type = "Task" [ 1742.252977] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.260376] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781904, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.276140] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.276744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.276957] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.277307] env[62525]: DEBUG oslo_concurrency.lockutils [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.277540] env[62525]: DEBUG nova.network.neutron [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Refreshing network info cache for port 6c93e506-f746-4d2e-922a-f389df5494a8 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1742.279378] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43366adc-ea1b-402b-9dd1-8a04c2f8eddf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.297955] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1742.298254] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1742.298392] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 
tempest-AttachInterfacesTestJSON-1022572624-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1742.298561] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1742.298707] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1742.298873] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1742.299112] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1742.299534] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1742.299534] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1742.299621] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1742.299788] env[62525]: DEBUG nova.virt.hardware [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1742.306336] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Reconfiguring VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1742.307434] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebb58f93-b618-443e-b529-9f2db605b279 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.326677] env[62525]: DEBUG nova.scheduler.client.report [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1742.329989] env[62525]: DEBUG oslo_vmware.api [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1742.329989] env[62525]: value = "task-1781905" [ 1742.329989] env[62525]: _type = "Task" [ 1742.329989] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.339928] env[62525]: DEBUG oslo_vmware.api [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781901, 'name': ReconfigVM_Task, 'duration_secs': 0.15317} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.343628] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369819', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'name': 'volume-bab92fbe-a9ca-438a-83f9-e0041c650d07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'serial': 'bab92fbe-a9ca-438a-83f9-e0041c650d07'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1742.348617] env[62525]: DEBUG oslo_vmware.api [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781905, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.354287] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781902, 'name': PowerOffVM_Task, 'duration_secs': 0.222371} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.354555] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1742.354763] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance 'c2baf40b-ea57-4552-8d56-45bcd49280ec' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1742.442568] env[62525]: DEBUG nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1742.470671] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1742.470954] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1742.471128] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1742.471348] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1742.471499] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1742.471663] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1742.471930] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1742.472115] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1742.472375] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1742.472451] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1742.472631] env[62525]: DEBUG nova.virt.hardware [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1742.474357] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cff033-223f-4ebf-9ba6-7067b525f626 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.482513] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1508dd0c-8faf-46ff-8861-d2965414de4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.537331] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781903, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.767623] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781904, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.832061] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.425s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.832308] env[62525]: INFO nova.compute.manager [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Migrating [ 1742.845910] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.049s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.846450] env[62525]: DEBUG nova.objects.instance [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1742.863611] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1742.864167] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1742.864292] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1742.864744] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1742.865341] env[62525]: DEBUG nova.virt.hardware [None 
req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1742.865581] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1742.865836] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1742.866194] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1742.866595] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1742.866595] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1742.866859] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1742.880146] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a6ab673-eb0a-467d-bb7d-1994ea0fdf5c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.892986] env[62525]: DEBUG oslo_vmware.api [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781905, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.900089] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1742.900089] env[62525]: value = "task-1781906" [ 1742.900089] env[62525]: _type = "Task" [ 1742.900089] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.914051] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781906, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.039174] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.800168} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.039453] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] b0d6acae-8da3-4ed9-8832-b1e88338ed27/b0d6acae-8da3-4ed9-8832-b1e88338ed27.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1743.039661] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1743.039908] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71edd962-b79e-4660-9716-35d1b602e458 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.045775] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1743.045775] env[62525]: value = "task-1781907" [ 1743.045775] env[62525]: _type = "Task" [ 1743.045775] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.054732] env[62525]: DEBUG nova.network.neutron [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Successfully updated port: d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1743.060139] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781907, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.148400] env[62525]: DEBUG nova.network.neutron [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updated VIF entry in instance network info cache for port 6c93e506-f746-4d2e-922a-f389df5494a8. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1743.149023] env[62525]: DEBUG nova.network.neutron [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6c93e506-f746-4d2e-922a-f389df5494a8", "address": "fa:16:3e:8f:6e:f1", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c93e506-f7", "ovs_interfaceid": "6c93e506-f746-4d2e-922a-f389df5494a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.179527] env[62525]: DEBUG nova.compute.manager [req-f0b77c86-8ec1-460f-b984-00eedf0c5f2f req-e0054908-3d25-45a9-bd10-743df09f37f9 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Received event network-vif-plugged-d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.179758] env[62525]: DEBUG oslo_concurrency.lockutils [req-f0b77c86-8ec1-460f-b984-00eedf0c5f2f req-e0054908-3d25-45a9-bd10-743df09f37f9 service nova] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.179966] env[62525]: DEBUG oslo_concurrency.lockutils [req-f0b77c86-8ec1-460f-b984-00eedf0c5f2f req-e0054908-3d25-45a9-bd10-743df09f37f9 service nova] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.180152] env[62525]: DEBUG oslo_concurrency.lockutils [req-f0b77c86-8ec1-460f-b984-00eedf0c5f2f req-e0054908-3d25-45a9-bd10-743df09f37f9 service nova] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.180320] env[62525]: DEBUG nova.compute.manager [req-f0b77c86-8ec1-460f-b984-00eedf0c5f2f req-e0054908-3d25-45a9-bd10-743df09f37f9 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] No waiting events found dispatching network-vif-plugged-d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1743.180484] env[62525]: WARNING nova.compute.manager [req-f0b77c86-8ec1-460f-b984-00eedf0c5f2f req-e0054908-3d25-45a9-bd10-743df09f37f9 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Received unexpected event network-vif-plugged-d205d712-e184-43b0-93aa-3e45e7674f76 for instance with vm_state building and task_state spawning. [ 1743.265298] env[62525]: DEBUG oslo_vmware.api [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781904, 'name': PowerOnVM_Task, 'duration_secs': 0.692538} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.265676] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1743.265867] env[62525]: INFO nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Took 7.91 seconds to spawn the instance on the hypervisor. 
[ 1743.266067] env[62525]: DEBUG nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1743.266862] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88141cbb-bc5a-4fbe-b176-e876c64dd9a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.315846] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.343522] env[62525]: DEBUG oslo_vmware.api [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781905, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.359697] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.411452] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781906, 'name': ReconfigVM_Task, 'duration_secs': 0.256827} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.411765] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance 'c2baf40b-ea57-4552-8d56-45bcd49280ec' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1743.422471] env[62525]: DEBUG nova.objects.instance [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lazy-loading 'flavor' on Instance uuid c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1743.555996] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080405} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.556296] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1743.557068] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08ef576-6f7d-4d70-8bf5-fe406462a07c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.561734] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.561888] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.562076] env[62525]: DEBUG nova.network.neutron [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1743.580805] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] b0d6acae-8da3-4ed9-8832-b1e88338ed27/b0d6acae-8da3-4ed9-8832-b1e88338ed27.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1743.581845] 
env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19942959-b1d3-423e-be74-041b24969883 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.601322] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1743.601322] env[62525]: value = "task-1781908" [ 1743.601322] env[62525]: _type = "Task" [ 1743.601322] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.610482] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781908, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.651940] env[62525]: DEBUG oslo_concurrency.lockutils [req-ffbfae9e-8618-4dd1-adb0-07c4f54419b9 req-44b0402c-04c7-4aa4-9bd4-aa7d01bf36c4 service nova] Releasing lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.789313] env[62525]: INFO nova.compute.manager [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Took 16.14 seconds to build instance. [ 1743.818291] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.818393] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1743.818601] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.818765] env[62525]: DEBUG nova.network.neutron [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1743.819849] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.820929] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.821186] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.821391] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.821583] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.821729] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_power_states {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.842984] env[62525]: DEBUG oslo_vmware.api [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781905, 'name': ReconfigVM_Task, 'duration_secs': 1.049583} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.843487] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.843718] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Reconfigured VM to attach interface {{(pid=62525) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1743.866025] env[62525]: DEBUG oslo_concurrency.lockutils [None req-164edda2-41e8-4cc9-b287-a06e4f9ebfd3 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.867614] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.761s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.867989] env[62525]: DEBUG nova.objects.instance [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lazy-loading 'resources' on Instance uuid 
2f589dc1-9244-475f-86d0-4b69b511508b {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1743.918114] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1743.918392] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1743.918585] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1743.918794] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1743.918942] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1743.919118] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1743.919379] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1743.919951] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1743.919951] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Got 1 possible 
topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1743.919951] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1743.920213] env[62525]: DEBUG nova.virt.hardware [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1743.927236] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1743.929101] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-349baf3f-cb4a-4c20-bc84-ee6bba39eed0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.943817] env[62525]: DEBUG oslo_concurrency.lockutils [None req-58bc511c-8d7b-440f-b13e-195edf158f73 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.396s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.951654] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1743.951654] env[62525]: value = "task-1781909" [ 1743.951654] env[62525]: _type = "Task" [ 1743.951654] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.962970] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781909, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.112026] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.112964] env[62525]: DEBUG nova.network.neutron [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1744.259211] env[62525]: DEBUG nova.network.neutron [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updating instance_info_cache with network_info: [{"id": "d205d712-e184-43b0-93aa-3e45e7674f76", "address": "fa:16:3e:b7:71:3a", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd205d712-e1", "ovs_interfaceid": "d205d712-e184-43b0-93aa-3e45e7674f76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.291873] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8dbf7a7-4c23-45b5-9b35-b4040cff4f70 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.648s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.328164] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Getting list of instances from cluster (obj){ [ 1744.328164] env[62525]: value = "domain-c8" [ 1744.328164] env[62525]: _type = "ClusterComputeResource" [ 1744.328164] env[62525]: } {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1744.329144] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9f951f-f08c-4cd2-941b-7e88572a0545 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.351465] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c9b71cb-fd59-44a8-85be-5755a7d91d56 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-80fbfbda-07fb-43ab-be74-3cbdaf890a55-6c93e506-f746-4d2e-922a-f389df5494a8" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.307s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.353510] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Got total of 14 instances {{(pid=62525) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1744.353662] 
env[62525]: WARNING nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] While synchronizing instance power states, found 17 instances in the database and 14 instances on the hypervisor. [ 1744.353904] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.354135] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 6e9051e9-aa89-408f-8f62-533085dc1312 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.354376] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 2f589dc1-9244-475f-86d0-4b69b511508b {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.354578] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.354724] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 82443424-6071-44b3-bd9a-f92a1a650f27 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.354873] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.355114] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.355272] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 80fbfbda-07fb-43ab-be74-3cbdaf890a55 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.355428] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 462bc19d-1eaa-4c57-8ebb-412a97614f03 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.355582] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.355972] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 8abf0305-2000-4ffe-aa88-e2b355383ea3 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.355972] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid ad4e94cc-d59c-4876-bf66-ec084350f875 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.356049] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid c2baf40b-ea57-4552-8d56-45bcd49280ec {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.356146] env[62525]: 
DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 130a3015-6caf-4374-a35f-9dd49bb8b3bf {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.356288] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 6624506c-56ad-41f4-8d90-ed34ccfb9385 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.356437] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid b0d6acae-8da3-4ed9-8832-b1e88338ed27 {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.356578] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Triggering sync for uuid 50ee564d-7b27-4bc4-a95e-7717de865cfb {{(pid=62525) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1744.359149] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.359431] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.359670] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.359890] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.360068] env[62525]: INFO nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] During sync_power_state the instance has a pending task (resize_prep). Skip. 
[ 1744.360240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.360428] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "2f589dc1-9244-475f-86d0-4b69b511508b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.360659] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.360838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.361079] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.361314] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.361487] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.361724] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.361966] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.362156] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.362378] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.362554] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.362767] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.362938] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.363177] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.363603] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.363853] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.364043] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.364271] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] 
Acquiring lock "ad4e94cc-d59c-4876-bf66-ec084350f875" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.364469] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.364648] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.364807] env[62525]: INFO nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] During sync_power_state the instance has a pending task (resize_migrating). Skip. [ 1744.364957] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.365149] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.365331] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.365542] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.365750] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.365987] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.366238] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.366416] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.366549] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1744.367873] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41e75b7-5329-42a7-bd29-e645aab990ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.370951] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c41c25-86c8-4447-b39e-44f9cb1a5368 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.373719] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfca5283-4100-49b8-9a02-eea015b8d628 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.376408] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83d7135-6328-44e2-9c74-8af80e4dc1ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.378985] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc435944-7e94-46b7-ad9e-2a00fe1985c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.383196] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551833ca-58e2-4f18-a12d-364e856db2ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.386711] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1dbc8c4-0cba-4ba2-803e-851490ecda52 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.390031] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763a71e9-575d-4bde-b9ba-826d5fa18009 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.393089] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da261714-8afa-46ab-b22e-73a7f1549a70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.396351] env[62525]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13088408-fbbd-41fa-bf8f-ebcb166f3a6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.399330] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794497da-41cb-447d-8f91-34d01311760e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.402086] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.460472] env[62525]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1744.476460] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781909, 'name': ReconfigVM_Task, 'duration_secs': 0.359384} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.476934] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1744.477772] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496d16df-c3e6-4d05-9d48-bb10db5684e6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.506021] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] c2baf40b-ea57-4552-8d56-45bcd49280ec/c2baf40b-ea57-4552-8d56-45bcd49280ec.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1744.511748] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a4c2230-20d3-431b-a682-fba841188942 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.534439] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1744.534439] env[62525]: value = "task-1781910" [ 1744.534439] env[62525]: _type = "Task" [ 1744.534439] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.550883] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781910, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.618035] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781908, 'name': ReconfigVM_Task, 'duration_secs': 0.555915} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.618364] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Reconfigured VM instance instance-00000060 to attach disk [datastore1] b0d6acae-8da3-4ed9-8832-b1e88338ed27/b0d6acae-8da3-4ed9-8832-b1e88338ed27.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1744.619138] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddb0a9e5-6917-49a4-8834-a2d70bf0b3cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.625798] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1744.625798] env[62525]: value = "task-1781911" [ 1744.625798] env[62525]: _type = "Task" [ 1744.625798] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.642101] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781911, 'name': Rename_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.700366] env[62525]: DEBUG nova.network.neutron [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.744306] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.761689] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130e7532-7e63-48aa-a198-dbbbd6318d3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.765141] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.765484] env[62525]: DEBUG nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Instance network_info: |[{"id": "d205d712-e184-43b0-93aa-3e45e7674f76", "address": "fa:16:3e:b7:71:3a", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd205d712-e1", "ovs_interfaceid": "d205d712-e184-43b0-93aa-3e45e7674f76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1744.765867] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:71:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd205d712-e184-43b0-93aa-3e45e7674f76', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1744.773219] env[62525]: DEBUG oslo.service.loopingcall [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1744.773794] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1744.774047] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94108941-0040-473c-887b-de1d75a929e6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.792017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3f3808-d585-4124-9433-a9d29244a643 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.797420] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1744.797420] env[62525]: value = "task-1781912" [ 1744.797420] env[62525]: _type = "Task" [ 1744.797420] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.826934] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98993788-711b-4a83-b910-a40aa60f3351 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.832361] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781912, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.837156] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2448a8d4-ee85-469e-9a84-565ada3211ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.850823] env[62525]: DEBUG nova.compute.provider_tree [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1744.909564] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.950772] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.591s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.951194] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.589s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.954710] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.593s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.955034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.594s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.960891] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.596s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.968594] env[62525]: INFO nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] During sync_power_state the instance has a pending task (resize_prep). Skip. 
[ 1744.968756] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.603s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.969083] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.606s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.969402] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.606s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.969622] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.225s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.986424] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.625s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.986750] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.624s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.987049] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.622s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.045170] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781910, 'name': ReconfigVM_Task, 'duration_secs': 0.305888} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.045840] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Reconfigured VM instance instance-0000005d to attach disk [datastore1] c2baf40b-ea57-4552-8d56-45bcd49280ec/c2baf40b-ea57-4552-8d56-45bcd49280ec.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1745.046138] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance 'c2baf40b-ea57-4552-8d56-45bcd49280ec' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1745.137985] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781911, 'name': Rename_Task, 'duration_secs': 0.156132} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.138895] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1745.139174] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fed89ea7-8f28-487f-a648-bd0dd8e1753c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.147195] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1745.147195] env[62525]: value = "task-1781913" [ 1745.147195] env[62525]: _type = "Task" [ 1745.147195] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.155158] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.205647] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.306995] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781912, 'name': CreateVM_Task, 'duration_secs': 0.392114} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.308649] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1745.308649] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.308649] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.308818] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1745.309009] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f895295-85ff-4c97-8516-aec1d736bc1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.314016] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1745.314016] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529a1eca-f369-7e41-d7ec-07448720bb73" [ 1745.314016] env[62525]: _type = "Task" [ 1745.314016] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.321399] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529a1eca-f369-7e41-d7ec-07448720bb73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.354117] env[62525]: DEBUG nova.scheduler.client.report [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1745.473415] env[62525]: DEBUG nova.compute.utils [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1745.504546] env[62525]: DEBUG nova.compute.manager [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Received event network-changed-d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1745.504546] env[62525]: DEBUG nova.compute.manager [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Refreshing instance network info cache due to event network-changed-d205d712-e184-43b0-93aa-3e45e7674f76. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1745.504763] env[62525]: DEBUG oslo_concurrency.lockutils [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] Acquiring lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.504916] env[62525]: DEBUG oslo_concurrency.lockutils [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] Acquired lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.505113] env[62525]: DEBUG nova.network.neutron [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Refreshing network info cache for port d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1745.553397] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ce4ab7-031e-440d-9bce-01920b44bd89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.574082] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5708f804-9019-4d71-9cf5-d0982f81abd1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.601418] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance 'c2baf40b-ea57-4552-8d56-45bcd49280ec' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1745.631895] env[62525]: DEBUG nova.compute.manager [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1745.658720] env[62525]: DEBUG oslo_vmware.api [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781913, 'name': PowerOnVM_Task, 'duration_secs': 0.511053} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.659752] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1745.660019] env[62525]: INFO nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Took 7.97 seconds to spawn the instance on the hypervisor. 
[ 1745.660258] env[62525]: DEBUG nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1745.663062] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17937a37-8c29-40ed-8008-e95e3a59c13f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.825230] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529a1eca-f369-7e41-d7ec-07448720bb73, 'name': SearchDatastore_Task, 'duration_secs': 0.010006} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.825566] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.825803] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1745.826099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.826269] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.826452] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1745.826721] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdc66bce-6fd1-4a90-bd79-b42b7628116b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.840776] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 
tempest-ServerActionsTestOtherA-2010789965-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1745.840951] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1745.841673] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57243de2-0c28-47ab-861c-4e64c81a8ba2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.846705] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1745.846705] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a212d2-eddc-f0cf-4b3a-bc7199f66f40" [ 1745.846705] env[62525]: _type = "Task" [ 1745.846705] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.854274] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a212d2-eddc-f0cf-4b3a-bc7199f66f40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.859066] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.861077] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.253s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.862822] env[62525]: INFO nova.compute.claims [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1745.880806] env[62525]: INFO nova.scheduler.client.report [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleted allocations for instance 2f589dc1-9244-475f-86d0-4b69b511508b [ 1745.978254] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" 
by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.123957] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.124238] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.124469] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "8abf0305-2000-4ffe-aa88-e2b355383ea3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.124666] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.124838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.126862] env[62525]: INFO nova.compute.manager [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Terminating instance [ 1746.130787] env[62525]: DEBUG nova.compute.manager [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1746.131012] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1746.131854] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2d224c-d513-4d48-be5c-b8ba98facffd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.142724] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1746.142972] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e963e873-e319-4e08-881b-0e04081b598e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.148901] env[62525]: DEBUG oslo_vmware.api [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1746.148901] env[62525]: value = "task-1781914" [ 1746.148901] env[62525]: _type = "Task" [ 1746.148901] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.160036] env[62525]: DEBUG oslo_vmware.api [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.161033] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.166220] env[62525]: DEBUG nova.network.neutron [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Port 78deceee-4409-4d65-b4f0-dfc4e932c381 binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1746.184661] env[62525]: INFO nova.compute.manager [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Took 16.07 seconds to build instance. 
[ 1746.226127] env[62525]: DEBUG nova.network.neutron [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updated VIF entry in instance network info cache for port d205d712-e184-43b0-93aa-3e45e7674f76. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1746.226471] env[62525]: DEBUG nova.network.neutron [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updating instance_info_cache with network_info: [{"id": "d205d712-e184-43b0-93aa-3e45e7674f76", "address": "fa:16:3e:b7:71:3a", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd205d712-e1", "ovs_interfaceid": "d205d712-e184-43b0-93aa-3e45e7674f76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.250403] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "interface-80fbfbda-07fb-43ab-be74-3cbdaf890a55-6c93e506-f746-4d2e-922a-f389df5494a8" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.250739] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-80fbfbda-07fb-43ab-be74-3cbdaf890a55-6c93e506-f746-4d2e-922a-f389df5494a8" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.359571] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a212d2-eddc-f0cf-4b3a-bc7199f66f40, 'name': SearchDatastore_Task, 'duration_secs': 0.033446} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.359571] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a505e8a3-7876-4fd3-80f3-216c77927397 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.364479] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1746.364479] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5242ea78-9f96-7b0f-d21c-6482fad1dfc9" [ 1746.364479] env[62525]: _type = "Task" [ 1746.364479] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.374568] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5242ea78-9f96-7b0f-d21c-6482fad1dfc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.389122] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dbffff69-44f4-4d40-8355-729c1aafd442 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "2f589dc1-9244-475f-86d0-4b69b511508b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.251s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.390032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "2f589dc1-9244-475f-86d0-4b69b511508b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.029s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.390326] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8e2e649-9e19-4531-a9b2-dd1eea74edf5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.399192] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a7b9c0-03ae-49aa-a40b-98a568bd4403 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.664105] env[62525]: DEBUG oslo_vmware.api [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781914, 'name': PowerOffVM_Task, 'duration_secs': 0.43607} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.664861] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1746.665050] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1746.665297] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5c5dcac-fbef-4653-ac4b-d196b10d5609 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.339028] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43d91715-0d4d-499f-ba65-df9e6122c142 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.238s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.341432] env[62525]: DEBUG oslo_concurrency.lockutils [req-558e4f59-0573-4c37-9420-c0e22bc6691e req-5ca5d4d1-fa54-4d37-a4b7-e11bbfe7f7ba service nova] Releasing lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.341940] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.342112] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.342418] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.342594] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.342772] env[62525]: DEBUG 
oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.342947] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.343119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.344617] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.344817] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.345043] env[62525]: INFO nova.compute.manager [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Attaching volume b11ed6d8-04b4-43d5-90b7-a24844041af1 to /dev/sdc [ 1747.348981] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "2f589dc1-9244-475f-86d0-4b69b511508b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.959s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.350247] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.350444] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 
tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.350627] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.350800] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.350956] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.359998] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1747.360276] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1747.360383] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleting the datastore file [datastore1] 8abf0305-2000-4ffe-aa88-e2b355383ea3 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1747.360850] env[62525]: INFO nova.compute.manager [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Terminating instance [ 1747.365682] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450408f2-eb5b-4d58-9101-adeb2f6ee720 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.371410] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-60541cbd-c8fa-4d8e-a9ed-8bfe4f1b99eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.379026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d490b2-2c20-4951-9045-4bc60873ada2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.379837] env[62525]: DEBUG nova.compute.manager [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1747.380031] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1747.384093] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41375e39-294c-4d19-8a33-ceae0808e7c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.409248] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5242ea78-9f96-7b0f-d21c-6482fad1dfc9, 'name': SearchDatastore_Task, 'duration_secs': 0.017244} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.427701] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.427966] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1747.428841] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c3260f-5e9a-4272-bf61-276a43c5286d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.431461] env[62525]: DEBUG oslo_vmware.api [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1747.431461] env[62525]: value = "task-1781916" [ 1747.431461] env[62525]: _type = "Task" [ 1747.431461] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.432163] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance '6e9051e9-aa89-408f-8f62-533085dc1312' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1747.435426] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1747.436852] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-acd991b9-3a91-4f69-91ec-913bc616e3c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.439325] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11a07a77-92c1-4fe1-bdb6-b5c7f74186e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.441100] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274dd32a-df93-4ed7-84e7-39b92a75c243 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.473536] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Reconfiguring VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1747.478859] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee5e672a-a863-45e0-9f86-0544443a56b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.493495] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1747.493495] env[62525]: value = "task-1781917" [ 1747.493495] env[62525]: _type = "Task" [ 1747.493495] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.493495] env[62525]: DEBUG oslo_vmware.api [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1747.493495] env[62525]: value = "task-1781918" [ 1747.493495] env[62525]: _type = "Task" [ 1747.493495] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.499694] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c552780-53e6-4512-9bf7-fdd90aa9f362 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.501378] env[62525]: DEBUG oslo_vmware.api [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781916, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.504322] env[62525]: INFO nova.compute.manager [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Terminating instance [ 1747.514303] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.514550] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.514748] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.516315] env[62525]: DEBUG nova.compute.manager [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1747.516511] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1747.517234] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1747.517234] env[62525]: value = "task-1781919" [ 1747.517234] env[62525]: _type = "Task" [ 1747.517234] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.517946] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da95c3b7-63d9-4b2f-8a40-e15d8a866192 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.535327] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781917, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.535797] env[62525]: DEBUG nova.virt.block_device [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updating existing volume attachment record: 4a988a83-74fb-4eaa-9554-8c85fe18e62a {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1747.537965] env[62525]: DEBUG oslo_vmware.api [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781918, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.545122] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1747.545122] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1f3f696-9e0b-474e-a4d0-ff2d40215c41 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.549033] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.554637] env[62525]: DEBUG oslo_vmware.api [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1747.554637] env[62525]: value = "task-1781920" [ 1747.554637] env[62525]: _type = "Task" [ 1747.554637] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.563666] env[62525]: DEBUG oslo_vmware.api [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781920, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.784146] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8c64c8-bbf1-4c00-b728-853e4341d6d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.793786] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a60139a-8e0d-4b59-a648-097c50468e27 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.831823] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c44e38-bf7c-4a38-b0ce-50240fac9933 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.840712] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24550f4-9cf1-4e0f-afa8-11c086bba907 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.856085] env[62525]: DEBUG nova.compute.provider_tree [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1747.939842] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1747.939842] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f9c5e03-e49a-41e9-a6b5-42a45200fb81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.950225] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1747.950225] env[62525]: value = "task-1781922" [ 1747.950225] env[62525]: _type = "Task" [ 1747.950225] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.953482] env[62525]: DEBUG oslo_vmware.api [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238326} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.957093] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1747.957350] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1747.957575] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1747.957761] env[62525]: INFO nova.compute.manager [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1747.958015] env[62525]: DEBUG oslo.service.loopingcall [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1747.958660] env[62525]: DEBUG nova.compute.manager [-] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1747.958772] env[62525]: DEBUG nova.network.neutron [-] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1747.965588] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781922, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.014023] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781917, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.014323] env[62525]: DEBUG oslo_vmware.api [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781918, 'name': PowerOffVM_Task, 'duration_secs': 0.211774} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.014567] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.014733] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1748.014977] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fd2aa6c-03e9-4013-a9c1-efa832832a20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.031129] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.063916] env[62525]: DEBUG oslo_vmware.api [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781920, 'name': PowerOffVM_Task, 'duration_secs': 0.189824} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.064196] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.064364] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1748.064607] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34f7c11f-ccd0-4c0b-87f2-116877467c6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.122451] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1748.122681] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1748.122868] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleting the datastore file [datastore1] b0d6acae-8da3-4ed9-8832-b1e88338ed27 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1748.123237] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-256aae78-f503-4221-b1bd-1c76ce00931d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.130521] env[62525]: DEBUG oslo_vmware.api [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1748.130521] env[62525]: value = "task-1781925" [ 1748.130521] env[62525]: _type = "Task" [ 1748.130521] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.138875] env[62525]: DEBUG oslo_vmware.api [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.155233] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1748.155450] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1748.155700] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleting the datastore file [datastore1] fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1748.155966] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61597925-af6f-4a16-aa2d-e95eb611b4ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.162963] env[62525]: DEBUG oslo_vmware.api [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for the task: (returnval){ [ 1748.162963] env[62525]: value = "task-1781926" [ 1748.162963] env[62525]: _type = "Task" [ 1748.162963] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.172207] env[62525]: DEBUG oslo_vmware.api [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.358171] env[62525]: DEBUG nova.scheduler.client.report [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1748.431777] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1748.432047] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1748.432196] env[62525]: DEBUG nova.network.neutron [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1748.462720] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781922, 'name': PowerOffVM_Task, 'duration_secs': 0.267292} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.462720] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.462720] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance '6e9051e9-aa89-408f-8f62-533085dc1312' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1748.498639] env[62525]: DEBUG nova.compute.manager [req-dfb51cb3-85c6-48df-a6d6-ac907009344e req-cc122df1-7368-48f6-acb6-59e37ec0013f service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Received event network-vif-deleted-ece5e526-9d41-4006-8159-5c2401d7fbbf {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1748.498858] env[62525]: INFO nova.compute.manager [req-dfb51cb3-85c6-48df-a6d6-ac907009344e req-cc122df1-7368-48f6-acb6-59e37ec0013f service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Neutron deleted interface ece5e526-9d41-4006-8159-5c2401d7fbbf; detaching it from the instance and deleting it from the info cache [ 1748.499057] env[62525]: DEBUG nova.network.neutron [req-dfb51cb3-85c6-48df-a6d6-ac907009344e req-cc122df1-7368-48f6-acb6-59e37ec0013f service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.511245] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781917, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554287} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.511493] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1748.511697] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1748.511930] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bb5ae66-a335-4ce8-9663-d488a4224fd8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.520021] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1748.520021] env[62525]: value = "task-1781927" [ 1748.520021] env[62525]: _type = "Task" [ 1748.520021] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.532432] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.535586] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.656836] env[62525]: DEBUG oslo_vmware.api [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155338} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.656836] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1748.656836] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1748.656836] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1748.657046] env[62525]: INFO nova.compute.manager [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1748.657311] env[62525]: DEBUG oslo.service.loopingcall [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.657547] env[62525]: DEBUG nova.compute.manager [-] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1748.657620] env[62525]: DEBUG nova.network.neutron [-] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1748.674880] env[62525]: DEBUG oslo_vmware.api [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Task: {'id': task-1781926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170354} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.674880] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1748.674880] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1748.675085] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1748.679165] env[62525]: INFO nova.compute.manager [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1748.679319] env[62525]: DEBUG oslo.service.loopingcall [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.679561] env[62525]: DEBUG nova.compute.manager [-] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1748.679647] env[62525]: DEBUG nova.network.neutron [-] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1748.863354] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.002s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.863900] env[62525]: DEBUG nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1748.869712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.381s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.873057] env[62525]: DEBUG nova.objects.instance [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'resources' on Instance uuid ad4e94cc-d59c-4876-bf66-ec084350f875 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1748.970964] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1748.971123] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1748.971271] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.971413] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1748.971589] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.975237] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1748.975524] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1748.975710] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1748.975906] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1748.976106] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1748.976290] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1748.984391] env[62525]: DEBUG nova.network.neutron [-] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.989703] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30db9f2d-e920-41df-8a34-2006c0719674 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.003476] env[62525]: INFO nova.compute.manager [-] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Took 1.04 seconds to deallocate network for instance. [ 1749.004960] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9532e43-a89e-411b-855f-43f7f89092df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.012517] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1749.012517] env[62525]: value = "task-1781928" [ 1749.012517] env[62525]: _type = "Task" [ 1749.012517] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.022116] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254cf82d-eafe-4999-b38d-b075db195bba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.040138] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781928, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.054270] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071167} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.058137] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1749.061482] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.073285] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66302367-3b5f-4730-be2f-f11f0e13b971 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.076138] env[62525]: DEBUG nova.compute.manager [req-dfb51cb3-85c6-48df-a6d6-ac907009344e req-cc122df1-7368-48f6-acb6-59e37ec0013f service nova] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Detach interface failed, port_id=ece5e526-9d41-4006-8159-5c2401d7fbbf, reason: Instance 8abf0305-2000-4ffe-aa88-e2b355383ea3 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1749.097810] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1749.098767] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffa669aa-0c6c-42c8-8270-19fdd21298b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.119474] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1749.119474] env[62525]: value = "task-1781929" [ 1749.119474] env[62525]: _type = "Task" [ 1749.119474] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.127429] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781929, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.313324] env[62525]: DEBUG nova.network.neutron [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance_info_cache with network_info: [{"id": "78deceee-4409-4d65-b4f0-dfc4e932c381", "address": "fa:16:3e:4f:e8:3d", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78deceee-44", "ovs_interfaceid": "78deceee-4409-4d65-b4f0-dfc4e932c381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.373390] env[62525]: DEBUG nova.compute.utils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1749.375323] env[62525]: DEBUG nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1749.375602] env[62525]: DEBUG nova.network.neutron [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1749.422647] env[62525]: DEBUG nova.policy [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e50433248fb4eb088e90d25fcb67c7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f3d5c15d37145aa84818a2ad88f307f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1749.513892] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.523057] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781928, 'name': ReconfigVM_Task, 'duration_secs': 0.360801} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.525889] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance '6e9051e9-aa89-408f-8f62-533085dc1312' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1749.550175] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.631067] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781929, 'name': ReconfigVM_Task, 'duration_secs': 0.299194} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.631178] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1749.632196] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed86b5a-9ae4-42e0-b8c1-ca9841f238c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.634536] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b964fc9-02d2-4b85-93c8-ac24031d162f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.641443] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf8479d-e399-4674-8fa2-adcb99060e46 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.644160] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1749.644160] env[62525]: value = "task-1781930" [ 1749.644160] env[62525]: _type = "Task" [ 1749.644160] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.674028] env[62525]: DEBUG nova.network.neutron [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Successfully created port: 7803d107-2456-41cd-ba7a-ba4c281f5848 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1749.676870] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd925371-691e-424b-ae20-cd5883a7e75a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.682997] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781930, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.683607] env[62525]: DEBUG nova.network.neutron [-] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.687963] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdb325c-4699-49cd-895c-b5f427755216 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.701289] env[62525]: DEBUG nova.network.neutron [-] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.702695] env[62525]: DEBUG nova.compute.provider_tree [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1749.816723] env[62525]: DEBUG oslo_concurrency.lockutils [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.883439] env[62525]: DEBUG nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1750.032189] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1750.032442] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1750.032607] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.032794] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1750.032941] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.033104] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1750.033313] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1750.033561] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1750.033644] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1750.033804] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1750.033973] env[62525]: DEBUG nova.virt.hardware [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1750.039334] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Reconfiguring VM instance instance-00000025 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1750.039671] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac7a9f28-2616-47fc-be0a-3820baad62b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.062657] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.063954] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1750.063954] env[62525]: value = "task-1781932" [ 1750.063954] env[62525]: _type = "Task" [ 1750.063954] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.071751] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781932, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.157821] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781930, 'name': Rename_Task, 'duration_secs': 0.144987} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.158134] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1750.158402] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-caf7b1f7-a72a-4396-a72d-97fd959ae054 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.164717] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1750.164717] env[62525]: value = "task-1781933" [ 1750.164717] env[62525]: _type = "Task" [ 1750.164717] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.182991] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781933, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.185586] env[62525]: INFO nova.compute.manager [-] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Took 1.53 seconds to deallocate network for instance. [ 1750.205032] env[62525]: INFO nova.compute.manager [-] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Took 1.52 seconds to deallocate network for instance. 
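Editor's note: the nova.virt.hardware lines above show the selection of a guest CPU layout for the m1.nano flavor. With no flavor or image topology constraints, the limits default to 65536 sockets/cores/threads, and for a single vCPU the only candidate is 1 socket x 1 core x 1 thread, which is exactly what the "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entry reports. The following is a minimal illustrative sketch of that factorization step, not the code in nova/virt/hardware.py; the function name and defaults are invented for the example.

# Simplified sketch (not Nova's implementation) of factoring a vCPU count into
# candidate (sockets, cores, threads) topologies and filtering them against the
# flavor/image maximums seen in the log (65536 each).
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every (sockets, cores, threads) triple whose product equals vcpus."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets > max_sockets or cores > max_cores or threads > max_threads:
            continue
        topologies.append((sockets, cores, threads))
    return topologies

# For the m1.nano flavor above (vcpus=1) there is exactly one candidate,
# matching the single VirtCPUTopology(cores=1,sockets=1,threads=1) in the log.
print(possible_topologies(1))      # [(1, 1, 1)]
print(possible_topologies(4)[:3])  # e.g. [(1, 1, 4), (1, 2, 2), (1, 4, 1)]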
[ 1750.205710] env[62525]: DEBUG nova.scheduler.client.report [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1750.344579] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36e487a-d4f5-4589-b3f1-5baba38ccc8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.364889] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2abb25-0522-46fb-b26b-e3c6d65de87f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.372542] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance 'c2baf40b-ea57-4552-8d56-45bcd49280ec' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1750.528014] env[62525]: DEBUG nova.compute.manager [req-7ccdaf0c-a758-4622-9e6a-ca4e973c9e53 req-bd526fc2-44a8-400d-97f2-4101682fc48a service nova] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Received event network-vif-deleted-971b9aa1-023e-45dd-b9dd-ddc0c852ee18 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.528479] env[62525]: DEBUG nova.compute.manager [req-7ccdaf0c-a758-4622-9e6a-ca4e973c9e53 req-bd526fc2-44a8-400d-97f2-4101682fc48a service nova] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Received event network-vif-deleted-6b0c2886-9fc0-4b1c-9a16-7e4f8a34f09b {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.548780] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.574515] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781932, 'name': ReconfigVM_Task, 'duration_secs': 0.410226} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.574817] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Reconfigured VM instance instance-00000025 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1750.575598] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dc7008-046c-4de7-a587-e9401d6c3f99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.597287] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 6e9051e9-aa89-408f-8f62-533085dc1312/6e9051e9-aa89-408f-8f62-533085dc1312.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1750.597909] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d33dd29-ac8b-4bb2-9578-17a933d59b69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.615758] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1750.615758] env[62525]: value = "task-1781934" [ 1750.615758] env[62525]: _type = "Task" [ 1750.615758] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.623262] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781934, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.674567] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781933, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.692587] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.711919] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.714481] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.805s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.714709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.714911] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1750.715326] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.554s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.718399] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.719122] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d82c810-8fb0-4f83-a562-5f2d86df677f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.729079] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525ead45-a025-49d3-9c97-2b2c12d1d866 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.735548] env[62525]: INFO nova.scheduler.client.report [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 
tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted allocations for instance ad4e94cc-d59c-4876-bf66-ec084350f875 [ 1750.751755] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8e2872-c246-4652-903b-0a3791e3357d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.759096] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32748fc8-dd2d-43b5-8ff0-a567f2bba748 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.790706] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178509MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1750.790950] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.879487] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1750.879829] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1df4510-b168-4450-a916-4b82b10ede97 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.887228] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1750.887228] env[62525]: value = "task-1781935" [ 1750.887228] env[62525]: _type = "Task" [ 1750.887228] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.893066] env[62525]: DEBUG nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1750.898152] env[62525]: DEBUG oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781935, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.933166] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1750.933287] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1750.933435] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.933634] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1750.933790] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.933942] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1750.934184] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1750.934375] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1750.934564] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1750.934736] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1750.934911] env[62525]: DEBUG nova.virt.hardware [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1750.935835] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4663a821-fa87-4e34-bfc5-bf85c85446f8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.944387] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4362be4d-5bd9-41ee-9703-f0e7e7cb92d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.050361] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.129967] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781934, 'name': ReconfigVM_Task, 'duration_secs': 0.394379} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.130584] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 6e9051e9-aa89-408f-8f62-533085dc1312/6e9051e9-aa89-408f-8f62-533085dc1312.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1751.131392] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance '6e9051e9-aa89-408f-8f62-533085dc1312' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1751.164411] env[62525]: DEBUG nova.network.neutron [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Successfully updated port: 7803d107-2456-41cd-ba7a-ba4c281f5848 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1751.181305] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781933, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.222086] env[62525]: INFO nova.compute.claims [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1751.243909] env[62525]: DEBUG oslo_concurrency.lockutils [None req-2fe323ac-4a6d-4655-a51c-fcc8175b7ec8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.278s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.244875] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.881s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.245184] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-032a8223-3cfb-4eb7-985a-4c420babca5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.256794] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ebd3ec-6d67-4d65-8a34-4c6e4eea0f95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.398906] env[62525]: DEBUG 
oslo_vmware.api [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781935, 'name': PowerOnVM_Task, 'duration_secs': 0.425196} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.399212] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1751.399403] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-99e2ec70-a688-4325-919b-ba5d30f2d11d tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance 'c2baf40b-ea57-4552-8d56-45bcd49280ec' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1751.550461] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.639033] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588c310e-8adf-4df1-807f-a76e89d6480f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.657999] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853c3aad-aa9d-47fb-9bcf-db07d080aeb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.677632] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-0f401a95-7b62-4940-a819-d0d69fc4a59a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.677775] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-0f401a95-7b62-4940-a819-d0d69fc4a59a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.678030] env[62525]: DEBUG nova.network.neutron [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1751.679136] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance '6e9051e9-aa89-408f-8f62-533085dc1312' progress to 67 {{(pid=62525) 
_update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1751.691201] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781933, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.728865] env[62525]: INFO nova.compute.resource_tracker [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating resource usage from migration 0b843855-0a70-4bb1-89c8-136594ac87b9 [ 1751.795045] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "ad4e94cc-d59c-4876-bf66-ec084350f875" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.550s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.941160] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b70b9ca-7890-4231-b01b-7a51e65adb1a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.949010] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af385ee1-e85a-402f-a799-eeca9adc552f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.978588] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833e4115-ceeb-4246-b966-61e50c9ff972 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.986224] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6b3c86-c031-4397-bd83-b29d1a681286 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.001062] env[62525]: DEBUG nova.compute.provider_tree [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.052118] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.097414] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1752.097702] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369823', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'name': 'volume-b11ed6d8-04b4-43d5-90b7-a24844041af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'serial': 'b11ed6d8-04b4-43d5-90b7-a24844041af1'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1752.098626] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a765682e-7586-4181-a00c-5d6811c9296c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.117393] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841aa4a4-0535-43f5-acbb-5290b96467a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.146564] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] volume-b11ed6d8-04b4-43d5-90b7-a24844041af1/volume-b11ed6d8-04b4-43d5-90b7-a24844041af1.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1752.147247] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b370ce6-29f0-43dd-b1bc-e511b9597123 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.167347] env[62525]: DEBUG oslo_vmware.api [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1752.167347] env[62525]: value = "task-1781936" [ 1752.167347] env[62525]: _type = "Task" [ 1752.167347] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.175639] env[62525]: DEBUG oslo_vmware.api [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.191185] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781933, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.239701] env[62525]: DEBUG nova.network.neutron [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1752.263232] env[62525]: DEBUG nova.network.neutron [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Port 7729ee20-ba8a-4607-95dd-4f5418171e89 binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1752.411678] env[62525]: DEBUG nova.network.neutron [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Updating instance_info_cache with network_info: [{"id": "7803d107-2456-41cd-ba7a-ba4c281f5848", "address": "fa:16:3e:4c:b4:48", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7803d107-24", "ovs_interfaceid": "7803d107-2456-41cd-ba7a-ba4c281f5848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.504579] env[62525]: DEBUG nova.scheduler.client.report [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1752.552190] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.557127] env[62525]: DEBUG nova.compute.manager [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Received event network-vif-plugged-7803d107-2456-41cd-ba7a-ba4c281f5848 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.557345] env[62525]: DEBUG oslo_concurrency.lockutils [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] Acquiring lock "0f401a95-7b62-4940-a819-d0d69fc4a59a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.557521] env[62525]: DEBUG oslo_concurrency.lockutils [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.557770] env[62525]: DEBUG oslo_concurrency.lockutils [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.557800] env[62525]: DEBUG nova.compute.manager [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] No waiting events found dispatching network-vif-plugged-7803d107-2456-41cd-ba7a-ba4c281f5848 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1752.557956] env[62525]: WARNING nova.compute.manager [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Received unexpected event network-vif-plugged-7803d107-2456-41cd-ba7a-ba4c281f5848 for instance with vm_state building and task_state spawning. [ 1752.558130] env[62525]: DEBUG nova.compute.manager [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Received event network-changed-7803d107-2456-41cd-ba7a-ba4c281f5848 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.558280] env[62525]: DEBUG nova.compute.manager [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Refreshing instance network info cache due to event network-changed-7803d107-2456-41cd-ba7a-ba4c281f5848. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1752.558477] env[62525]: DEBUG oslo_concurrency.lockutils [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] Acquiring lock "refresh_cache-0f401a95-7b62-4940-a819-d0d69fc4a59a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.677417] env[62525]: DEBUG oslo_vmware.api [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781936, 'name': ReconfigVM_Task, 'duration_secs': 0.466205} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.677777] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfigured VM instance instance-00000057 to attach disk [datastore1] volume-b11ed6d8-04b4-43d5-90b7-a24844041af1/volume-b11ed6d8-04b4-43d5-90b7-a24844041af1.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1752.682886] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69968491-a6a9-4f74-907f-bcc5ea52fba0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.706166] env[62525]: DEBUG oslo_vmware.api [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1781933, 'name': PowerOnVM_Task, 'duration_secs': 2.108537} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.710316] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1752.712183] env[62525]: INFO nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Took 10.27 seconds to spawn the instance on the hypervisor. [ 1752.712183] env[62525]: DEBUG nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1752.712183] env[62525]: DEBUG oslo_vmware.api [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1752.712183] env[62525]: value = "task-1781937" [ 1752.712183] env[62525]: _type = "Task" [ 1752.712183] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.712465] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c3f575-de04-43e9-81c5-7dfbab8f151f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.731792] env[62525]: DEBUG oslo_vmware.api [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781937, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.924934] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-0f401a95-7b62-4940-a819-d0d69fc4a59a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.925359] env[62525]: DEBUG nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Instance network_info: |[{"id": "7803d107-2456-41cd-ba7a-ba4c281f5848", "address": "fa:16:3e:4c:b4:48", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7803d107-24", "ovs_interfaceid": "7803d107-2456-41cd-ba7a-ba4c281f5848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1752.927304] env[62525]: DEBUG oslo_concurrency.lockutils [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] Acquired lock "refresh_cache-0f401a95-7b62-4940-a819-d0d69fc4a59a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.927521] env[62525]: DEBUG nova.network.neutron [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Refreshing network info cache for port 7803d107-2456-41cd-ba7a-ba4c281f5848 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1752.929047] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 
tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:b4:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7803d107-2456-41cd-ba7a-ba4c281f5848', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1752.936789] env[62525]: DEBUG oslo.service.loopingcall [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1752.940278] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1752.941221] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a143cf6c-86bc-49cd-8f61-f79becf2ae9e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.962880] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1752.962880] env[62525]: value = "task-1781938" [ 1752.962880] env[62525]: _type = "Task" [ 1752.962880] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.971372] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781938, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.010143] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.295s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.010358] env[62525]: INFO nova.compute.manager [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Migrating [ 1753.020401] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.507s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.020644] env[62525]: DEBUG nova.objects.instance [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'resources' on Instance uuid 8abf0305-2000-4ffe-aa88-e2b355383ea3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1753.053121] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.153658] env[62525]: DEBUG nova.network.neutron [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Updated VIF entry in instance network info cache for port 7803d107-2456-41cd-ba7a-ba4c281f5848. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1753.154047] env[62525]: DEBUG nova.network.neutron [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Updating instance_info_cache with network_info: [{"id": "7803d107-2456-41cd-ba7a-ba4c281f5848", "address": "fa:16:3e:4c:b4:48", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7803d107-24", "ovs_interfaceid": "7803d107-2456-41cd-ba7a-ba4c281f5848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.225133] env[62525]: DEBUG oslo_vmware.api [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781937, 'name': ReconfigVM_Task, 'duration_secs': 0.149005} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.225425] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369823', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'name': 'volume-b11ed6d8-04b4-43d5-90b7-a24844041af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'serial': 'b11ed6d8-04b4-43d5-90b7-a24844041af1'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1753.240878] env[62525]: INFO nova.compute.manager [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Took 22.25 seconds to build instance. 
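Editor's note: the instance_info_cache entries above carry the full Neutron port payload for port 7803d107-2456-41cd-ba7a-ba4c281f5848 (MAC fa:16:3e:4c:b4:48, fixed IP 192.168.128.9 on br-int, devname tap7803d107-24, MTU 8950). The sketch below pulls a few of those fields back out of such a network_info list; the dictionary literal is trimmed to the values visible in the log, and the helper is purely illustrative rather than part of Nova.

# Illustrative helper (not part of Nova) for reading the network_info structure
# logged above. The literal copies values shown for port 7803d107-2456-41cd-ba7a-ba4c281f5848.
network_info = [{
    "id": "7803d107-2456-41cd-ba7a-ba4c281f5848",
    "address": "fa:16:3e:4c:b4:48",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
    "devname": "tap7803d107-24",
    "ovs_interfaceid": "7803d107-2456-41cd-ba7a-ba4c281f5848",
    "active": True,
}]

def summarize_vifs(nw_info):
    """Yield (mac, fixed_ips, devname, mtu) for each VIF in a network_info list."""
    for vif in nw_info:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip.get("type") == "fixed"
        ]
        yield vif["address"], fixed_ips, vif["devname"], vif["network"]["meta"].get("mtu")

for mac, ips, dev, mtu in summarize_vifs(network_info):
    print(mac, ips, dev, mtu)  # fa:16:3e:4c:b4:48 ['192.168.128.9'] tap7803d107-24 8950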
[ 1753.287072] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.287314] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.287480] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.473849] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781938, 'name': CreateVM_Task, 'duration_secs': 0.36622} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.474033] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1753.474739] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.474913] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.475319] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1753.475576] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1e184f0-cfda-42b9-b094-bc11d08b82a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.481855] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 
tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1753.481855] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e43e70-8eab-12af-427a-03720f6ef4eb" [ 1753.481855] env[62525]: _type = "Task" [ 1753.481855] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.490129] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e43e70-8eab-12af-427a-03720f6ef4eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.532605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.532793] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.533186] env[62525]: DEBUG nova.network.neutron [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1753.554198] env[62525]: DEBUG oslo_vmware.api [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781919, 'name': ReconfigVM_Task, 'duration_secs': 5.900464} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.557195] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.557462] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Reconfigured VM to detach interface {{(pid=62525) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1753.656943] env[62525]: DEBUG oslo_concurrency.lockutils [req-817ba243-8763-40fa-8643-93a4c3b3aa25 req-b0750986-fb04-4bfe-8df4-448b9060256a service nova] Releasing lock "refresh_cache-0f401a95-7b62-4940-a819-d0d69fc4a59a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.743268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6ac007b1-1f3f-4282-be7b-0950373daf07 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.782s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.743519] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.377s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.743764] env[62525]: INFO nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1753.743883] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.809594] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b900963-090b-48c0-92b7-d8a1511343ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.818692] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd0fa3e-5a57-4a55-be64-586e98fb3758 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.853046] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969c7295-88c9-428c-bc77-2165d4d049b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.861157] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2784188-ce9c-489e-944e-8f642c44050b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.868641] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.869251] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.869550] env[62525]: DEBUG nova.compute.manager [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Going to confirm migration 2 {{(pid=62525) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1753.885162] env[62525]: DEBUG nova.compute.provider_tree [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1753.992961] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e43e70-8eab-12af-427a-03720f6ef4eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010123} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.993322] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.993557] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1753.993791] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.993939] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.994149] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1753.994791] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2aac6baa-e4f5-4016-8b2c-46667a831684 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.010316] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1754.010502] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1754.011259] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f177dc81-c27c-4f08-ad95-e1f48361bfc4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.016484] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1754.016484] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a922fc-314e-e7ec-f4c0-9a8d1fcb7740" [ 1754.016484] env[62525]: _type = "Task" [ 1754.016484] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.024322] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a922fc-314e-e7ec-f4c0-9a8d1fcb7740, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.271415] env[62525]: DEBUG nova.objects.instance [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lazy-loading 'flavor' on Instance uuid c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.380735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.380927] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.381115] env[62525]: DEBUG nova.network.neutron [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.388443] env[62525]: DEBUG nova.scheduler.client.report [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1754.527404] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a922fc-314e-e7ec-f4c0-9a8d1fcb7740, 'name': SearchDatastore_Task, 'duration_secs': 0.035339} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.528206] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46a985f5-ba28-4de3-9510-f8f111afaad3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.534522] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1754.534522] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52aab140-2286-ab06-3b6f-b27a785031b2" [ 1754.534522] env[62525]: _type = "Task" [ 1754.534522] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.542193] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52aab140-2286-ab06-3b6f-b27a785031b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.543172] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.543371] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquired lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.543515] env[62525]: DEBUG nova.network.neutron [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.543695] env[62525]: DEBUG nova.objects.instance [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'info_cache' on Instance uuid c2baf40b-ea57-4552-8d56-45bcd49280ec {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.643026] env[62525]: DEBUG nova.network.neutron [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with 
network_info: [{"id": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "address": "fa:16:3e:d3:d5:d4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdc798dc-53", "ovs_interfaceid": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.764817] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.765237] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.765553] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.765805] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.766061] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.773497] env[62525]: INFO nova.compute.manager [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Terminating instance [ 1754.773497] env[62525]: DEBUG nova.compute.manager [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1754.773801] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1754.777615] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b858bf77-6990-4ead-b4e6-d6590d2bbfbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.784725] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a77fe1de-47a9-4488-88e8-a9023e1e9392 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.438s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.789627] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1754.789938] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab5a189c-1572-4e99-bcb5-7693a4e016a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.799414] env[62525]: DEBUG oslo_vmware.api [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1754.799414] env[62525]: value = "task-1781939" [ 1754.799414] env[62525]: _type = "Task" [ 1754.799414] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.814819] env[62525]: DEBUG oslo_vmware.api [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781939, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.898823] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.902531] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.210s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.902792] env[62525]: DEBUG nova.objects.instance [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'resources' on Instance uuid b0d6acae-8da3-4ed9-8832-b1e88338ed27 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.905306] env[62525]: DEBUG nova.compute.manager [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Received event network-changed-d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1754.905501] env[62525]: DEBUG nova.compute.manager [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Refreshing instance network info cache due to event network-changed-d205d712-e184-43b0-93aa-3e45e7674f76. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1754.905729] env[62525]: DEBUG oslo_concurrency.lockutils [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] Acquiring lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.905898] env[62525]: DEBUG oslo_concurrency.lockutils [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] Acquired lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.906071] env[62525]: DEBUG nova.network.neutron [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Refreshing network info cache for port d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1754.923586] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.923840] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquired lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.924059] env[62525]: DEBUG nova.network.neutron [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.930638] env[62525]: INFO nova.scheduler.client.report [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted allocations for instance 8abf0305-2000-4ffe-aa88-e2b355383ea3 [ 1755.046015] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52aab140-2286-ab06-3b6f-b27a785031b2, 'name': SearchDatastore_Task, 'duration_secs': 0.04054} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.046368] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.046685] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1755.046992] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e231fbb-908d-4aab-8d82-38d9ba822007 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.055430] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1755.055430] env[62525]: value = "task-1781940" [ 1755.055430] env[62525]: _type = "Task" [ 1755.055430] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.064897] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781940, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.138588] env[62525]: DEBUG nova.network.neutron [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.146982] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.310189] env[62525]: DEBUG oslo_vmware.api [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781939, 'name': PowerOffVM_Task, 'duration_secs': 0.240175} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.310518] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1755.310695] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1755.310965] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0d9b71b-ac13-43c4-9c5e-676f9c4fbdcb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.413818] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1755.414045] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1755.414247] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleting the datastore file [datastore1] 80fbfbda-07fb-43ab-be74-3cbdaf890a55 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1755.414874] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-972f650b-ab64-48c9-a1da-fca785bbe653 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.424884] env[62525]: DEBUG oslo_vmware.api [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1755.424884] env[62525]: value = "task-1781942" [ 1755.424884] env[62525]: _type = "Task" [ 1755.424884] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.440037] env[62525]: DEBUG oslo_vmware.api [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781942, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.441531] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a6c5fbae-0565-44d9-9f18-2ad4ca6c8ffd tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "8abf0305-2000-4ffe-aa88-e2b355383ea3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.317s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.572689] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781940, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511056} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.572975] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1755.573323] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1755.573623] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f79d86c3-9fce-4bc4-b496-d413aa699e89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.582313] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1755.582313] env[62525]: value = "task-1781943" [ 1755.582313] env[62525]: _type = "Task" [ 1755.582313] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.591365] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781943, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.644623] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.661607] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.661607] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.693025] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55889f8-88bf-495e-afb9-122e60d4c5aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.702703] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8242fcf-92fb-48db-a27f-7e1f5f3eec80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.740484] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5762a5ce-9674-437a-af26-e0032589022c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.751626] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1809bf-ecf1-428f-9a79-8b595e4f46a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.765317] env[62525]: DEBUG nova.compute.provider_tree [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.933888] env[62525]: DEBUG nova.network.neutron [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updated VIF entry in instance network info cache for port d205d712-e184-43b0-93aa-3e45e7674f76. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1755.934553] env[62525]: DEBUG nova.network.neutron [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updating instance_info_cache with network_info: [{"id": "d205d712-e184-43b0-93aa-3e45e7674f76", "address": "fa:16:3e:b7:71:3a", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd205d712-e1", "ovs_interfaceid": "d205d712-e184-43b0-93aa-3e45e7674f76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.937862] env[62525]: INFO nova.network.neutron [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Port 6c93e506-f746-4d2e-922a-f389df5494a8 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1755.938224] env[62525]: DEBUG nova.network.neutron [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [{"id": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "address": "fa:16:3e:4a:3a:29", "network": {"id": "58fc2de9-73a3-4f13-914c-ad34af02ccb5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1569148472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12f7b101b8848f28f2fc65ce3f0076c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77fa4a-b7", "ovs_interfaceid": "2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.943358] env[62525]: DEBUG oslo_vmware.api [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781942, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.992024] env[62525]: DEBUG nova.network.neutron [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance_info_cache with network_info: [{"id": "78deceee-4409-4d65-b4f0-dfc4e932c381", "address": "fa:16:3e:4f:e8:3d", "network": {"id": "a6c771f9-7caf-4225-bef3-42410b82f9ef", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-106368522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c87f1997d5c4739850790da5dd969fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78deceee-44", "ovs_interfaceid": "78deceee-4409-4d65-b4f0-dfc4e932c381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.091786] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073331} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.092094] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1756.092944] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42fe991-def2-4563-aad0-bbe35d7f7e2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.115471] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1756.115822] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e51f8b7d-9a60-4ba0-8722-a428fef285f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.135972] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1756.135972] env[62525]: value = "task-1781944" [ 1756.135972] env[62525]: _type = "Task" [ 1756.135972] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.144065] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781944, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.165212] env[62525]: INFO nova.compute.manager [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Detaching volume bab92fbe-a9ca-438a-83f9-e0041c650d07 [ 1756.176680] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e0acf4-f28c-4869-acd1-3189da8a1779 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.200845] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f13758d-1f9d-42d8-b469-38d56e31fa18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.208895] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance '6e9051e9-aa89-408f-8f62-533085dc1312' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1756.215095] env[62525]: INFO nova.virt.block_device [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Attempting to driver detach volume bab92fbe-a9ca-438a-83f9-e0041c650d07 from mountpoint /dev/sdb [ 1756.215095] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1756.215095] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369819', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'name': 'volume-bab92fbe-a9ca-438a-83f9-e0041c650d07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'serial': 'bab92fbe-a9ca-438a-83f9-e0041c650d07'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1756.215095] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405417cc-c47b-4a67-bd4b-f1584b14bc6f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.243135] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27696c25-c1ab-4614-94cb-3d54ab7d9de8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.250405] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3339a33-05e6-40ae-bde7-4c2f8e9f5a56 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.275170] env[62525]: DEBUG nova.scheduler.client.report [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1756.279892] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578f0b10-e7f6-4caa-9f02-4fa359b7909b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.297160] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] The volume has not been displaced from its original location: [datastore1] volume-bab92fbe-a9ca-438a-83f9-e0041c650d07/volume-bab92fbe-a9ca-438a-83f9-e0041c650d07.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1756.301416] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1756.301962] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ebb9cd1-effc-4474-8ffa-b8321323eb47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.320937] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1756.320937] env[62525]: value = "task-1781945" [ 1756.320937] env[62525]: _type = "Task" [ 1756.320937] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.332196] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781945, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.435921] env[62525]: DEBUG oslo_vmware.api [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.802697} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.436260] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1756.436417] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1756.436598] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1756.436771] env[62525]: INFO nova.compute.manager [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Took 1.66 seconds to destroy the instance on the hypervisor. 
[ 1756.437018] env[62525]: DEBUG oslo.service.loopingcall [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1756.437459] env[62525]: DEBUG oslo_concurrency.lockutils [req-49998cd8-6825-423a-9cee-b087b77b8085 req-485aea49-18e2-40d3-ae38-76bcda55f415 service nova] Releasing lock "refresh_cache-50ee564d-7b27-4bc4-a95e-7717de865cfb" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.437846] env[62525]: DEBUG nova.compute.manager [-] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1756.437966] env[62525]: DEBUG nova.network.neutron [-] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1756.444705] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Releasing lock "refresh_cache-80fbfbda-07fb-43ab-be74-3cbdaf890a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.493552] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Releasing lock "refresh_cache-c2baf40b-ea57-4552-8d56-45bcd49280ec" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.493814] env[62525]: DEBUG nova.objects.instance [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lazy-loading 'migration_context' on Instance uuid c2baf40b-ea57-4552-8d56-45bcd49280ec {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.647096] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781944, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.666524] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e1db30-d0ce-4d7b-8ba7-2ded1155bb9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.688850] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance '6624506c-56ad-41f4-8d90-ed34ccfb9385' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1756.721788] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1756.721930] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86de40f7-0fdd-466a-b337-5e41b9363ffd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.729706] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1756.729706] env[62525]: value = "task-1781946" [ 1756.729706] env[62525]: _type = "Task" [ 1756.729706] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.739197] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781946, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.780418] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.782929] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.065s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.784033] env[62525]: DEBUG nova.objects.instance [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lazy-loading 'resources' on Instance uuid fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.806657] env[62525]: INFO nova.scheduler.client.report [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted allocations for instance b0d6acae-8da3-4ed9-8832-b1e88338ed27 [ 1756.836107] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781945, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.924807] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.925036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.948733] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1b2cc0c9-e84b-4f00-ab98-b570fbfe1542 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "interface-80fbfbda-07fb-43ab-be74-3cbdaf890a55-6c93e506-f746-4d2e-922a-f389df5494a8" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 10.698s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.997128] env[62525]: DEBUG nova.objects.base [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1756.998440] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc0266d-8920-4bb2-b74e-5ed5bf356284 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.018326] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-066a708b-a8a7-4107-9d43-c4901779e1f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.024482] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1757.024482] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b1e6b3-8849-3900-62ab-dd8497370a60" [ 1757.024482] env[62525]: _type = "Task" [ 1757.024482] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.033628] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b1e6b3-8849-3900-62ab-dd8497370a60, 'name': SearchDatastore_Task, 'duration_secs': 0.007303} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.033891] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.129940] env[62525]: DEBUG nova.compute.manager [req-577f73aa-d41e-4958-ab2c-9eff40b6d94d req-cde76939-fa19-489d-9091-6c7a0ed09fb1 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Received event network-vif-deleted-2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1757.130238] env[62525]: INFO nova.compute.manager [req-577f73aa-d41e-4958-ab2c-9eff40b6d94d req-cde76939-fa19-489d-9091-6c7a0ed09fb1 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Neutron deleted interface 2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4; detaching it from the instance and deleting it from the info cache [ 1757.130426] env[62525]: DEBUG nova.network.neutron [req-577f73aa-d41e-4958-ab2c-9eff40b6d94d req-cde76939-fa19-489d-9091-6c7a0ed09fb1 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.147768] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781944, 'name': ReconfigVM_Task, 'duration_secs': 0.688432} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.147983] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1757.148626] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5b21ef3-1dc1-417b-9d8f-e3b1fa402f8e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.154910] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1757.154910] env[62525]: value = "task-1781947" [ 1757.154910] env[62525]: _type = "Task" [ 1757.154910] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.162749] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781947, 'name': Rename_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.195549] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1757.196027] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51eb0ae5-0f39-46f4-b35f-b4f58e889098 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.203647] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1757.203647] env[62525]: value = "task-1781948" [ 1757.203647] env[62525]: _type = "Task" [ 1757.203647] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.212056] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.240701] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781946, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.326574] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0001d733-9009-4bc0-9e1f-774d1d7a945d tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.984s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.327823] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 12.962s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.327903] env[62525]: INFO nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1757.328037] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "b0d6acae-8da3-4ed9-8832-b1e88338ed27" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.336161] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781945, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.430739] env[62525]: INFO nova.compute.manager [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Detaching volume 8adfb2c8-5f82-4a8c-83e6-582042005da0 [ 1757.465986] env[62525]: INFO nova.virt.block_device [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Attempting to driver detach volume 8adfb2c8-5f82-4a8c-83e6-582042005da0 from mountpoint /dev/sdb [ 1757.466244] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1757.466435] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369796', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'name': 'volume-8adfb2c8-5f82-4a8c-83e6-582042005da0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '82443424-6071-44b3-bd9a-f92a1a650f27', 'attached_at': '', 'detached_at': '', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'serial': '8adfb2c8-5f82-4a8c-83e6-582042005da0'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1757.467311] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d19875f-d593-4bb8-8385-867c7e584bbf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.498075] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40af031f-072f-4966-9b72-a2d87bfcbf96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.505515] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dead4f-a0d1-4261-9646-6d95beadb0ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.527107] env[62525]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4668e5d0-2445-4b32-aff0-8885bf86da38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.542441] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] The volume has not been displaced from its original location: [datastore1] volume-8adfb2c8-5f82-4a8c-83e6-582042005da0/volume-8adfb2c8-5f82-4a8c-83e6-582042005da0.vmdk. No consolidation needed. {{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1757.548092] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1757.550624] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd0f8a09-ffec-4eee-9524-52bc5a440eb9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.569489] env[62525]: DEBUG oslo_vmware.api [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1757.569489] env[62525]: value = "task-1781949" [ 1757.569489] env[62525]: _type = "Task" [ 1757.569489] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.573584] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8058d027-ab57-4454-8c13-66a0157c5b4e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.580453] env[62525]: DEBUG oslo_vmware.api [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781949, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.582785] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279456a6-7d1f-4441-b5c4-c39b74db6a23 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.615087] env[62525]: DEBUG nova.network.neutron [-] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.617071] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2696bfb-a49a-4b84-8f07-ed7888b5d1fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.625283] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d356543-3e2f-44ce-9e1e-e7a55382ad3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.640216] env[62525]: DEBUG nova.compute.provider_tree [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1757.641448] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25692533-9e80-4236-8e94-2704353d9a5c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.650713] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56f4aa8-fbf8-4635-9ca8-296a7cee3bba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.670274] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781947, 'name': Rename_Task, 'duration_secs': 0.153181} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.670538] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1757.670778] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6708351e-80c8-418d-b249-44bed5ecce9e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.677610] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1757.677610] env[62525]: value = "task-1781950" [ 1757.677610] env[62525]: _type = "Task" [ 1757.677610] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.685530] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781950, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.697446] env[62525]: DEBUG nova.compute.manager [req-577f73aa-d41e-4958-ab2c-9eff40b6d94d req-cde76939-fa19-489d-9091-6c7a0ed09fb1 service nova] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Detach interface failed, port_id=2f77fa4a-b7ae-42b9-8f3d-d8f404e4b7a4, reason: Instance 80fbfbda-07fb-43ab-be74-3cbdaf890a55 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1757.713576] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.742317] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781946, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.833675] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781945, 'name': ReconfigVM_Task, 'duration_secs': 1.447829} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.833973] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1757.839130] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45a633e9-510e-4be0-9d58-3d645e85e2d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.854417] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1757.854417] env[62525]: value = "task-1781951" [ 1757.854417] env[62525]: _type = "Task" [ 1757.854417] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.865010] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781951, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.080998] env[62525]: DEBUG oslo_vmware.api [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781949, 'name': ReconfigVM_Task, 'duration_secs': 0.258462} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.081306] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1758.085930] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8dfdf3d-8ae1-4a14-8327-ebb897de434f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.102998] env[62525]: DEBUG oslo_vmware.api [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1758.102998] env[62525]: value = "task-1781952" [ 1758.102998] env[62525]: _type = "Task" [ 1758.102998] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.109226] env[62525]: DEBUG oslo_vmware.api [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781952, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.120805] env[62525]: INFO nova.compute.manager [-] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Took 1.68 seconds to deallocate network for instance. [ 1758.145072] env[62525]: DEBUG nova.scheduler.client.report [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1758.191790] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781950, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.216860] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781948, 'name': PowerOffVM_Task, 'duration_secs': 0.673247} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.217329] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1758.217648] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance '6624506c-56ad-41f4-8d90-ed34ccfb9385' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1758.243096] env[62525]: DEBUG oslo_vmware.api [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781946, 'name': PowerOnVM_Task, 'duration_secs': 1.310878} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.243962] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.243962] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e8553e75-dc7e-43cd-81dc-c9c6b67bc973 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance '6e9051e9-aa89-408f-8f62-533085dc1312' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1758.364698] env[62525]: DEBUG oslo_vmware.api [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781951, 'name': ReconfigVM_Task, 'duration_secs': 0.159172} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.365131] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369819', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'name': 'volume-bab92fbe-a9ca-438a-83f9-e0041c650d07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'bab92fbe-a9ca-438a-83f9-e0041c650d07', 'serial': 'bab92fbe-a9ca-438a-83f9-e0041c650d07'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1758.612326] env[62525]: DEBUG oslo_vmware.api [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781952, 'name': ReconfigVM_Task, 'duration_secs': 0.194484} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.612746] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369796', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'name': 'volume-8adfb2c8-5f82-4a8c-83e6-582042005da0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '82443424-6071-44b3-bd9a-f92a1a650f27', 'attached_at': '', 'detached_at': '', 'volume_id': '8adfb2c8-5f82-4a8c-83e6-582042005da0', 'serial': '8adfb2c8-5f82-4a8c-83e6-582042005da0'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1758.629134] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.649466] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.866s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.652568] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.862s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.668149] env[62525]: INFO nova.scheduler.client.report [None req-d4625c8e-7adc-4310-872e-f1894230acd4 
tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Deleted allocations for instance fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3 [ 1758.688680] env[62525]: DEBUG oslo_vmware.api [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781950, 'name': PowerOnVM_Task, 'duration_secs': 0.588247} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.689025] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.689153] env[62525]: INFO nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Took 7.80 seconds to spawn the instance on the hypervisor. [ 1758.689341] env[62525]: DEBUG nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1758.690146] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a004fe5e-7a9f-4de3-a7c4-5539d10880f9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.724992] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1758.725251] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1758.725409] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1758.725598] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 
tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1758.725748] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1758.725885] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1758.726141] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1758.726371] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1758.726649] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1758.726790] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1758.726988] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1758.734078] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6466c537-aa03-4c4a-ada1-23697c0a4646 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.754124] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1758.754124] env[62525]: value = "task-1781953" [ 1758.754124] env[62525]: _type = "Task" [ 1758.754124] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.764317] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781953, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.815321] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.815558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.909200] env[62525]: DEBUG nova.objects.instance [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lazy-loading 'flavor' on Instance uuid c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.164277] env[62525]: DEBUG nova.objects.instance [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'flavor' on Instance uuid 82443424-6071-44b3-bd9a-f92a1a650f27 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.177283] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d4625c8e-7adc-4310-872e-f1894230acd4 tempest-ServerRescueNegativeTestJSON-1372597733 tempest-ServerRescueNegativeTestJSON-1372597733-project-member] Lock "fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.827s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.207800] env[62525]: INFO nova.compute.manager [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Took 20.62 seconds to build instance. [ 1759.268114] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781953, 'name': ReconfigVM_Task, 'duration_secs': 0.216563} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.268410] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance '6624506c-56ad-41f4-8d90-ed34ccfb9385' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.317863] env[62525]: DEBUG nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1759.667136] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Applying migration context for instance c2baf40b-ea57-4552-8d56-45bcd49280ec as it has an incoming, in-progress migration f4cfc1b6-3b0c-4bf2-a28a-d34d7e9c9a52. Migration status is finished {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1759.667457] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Applying migration context for instance 6e9051e9-aa89-408f-8f62-533085dc1312 as it has an incoming, in-progress migration d5a5cb92-367a-4848-a0d1-056710d84bb4. Migration status is finished {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1759.667552] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Applying migration context for instance 6624506c-56ad-41f4-8d90-ed34ccfb9385 as it has an incoming, in-progress migration 0b843855-0a70-4bb1-89c8-136594ac87b9. Migration status is migrating {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1759.669175] env[62525]: INFO nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating resource usage from migration f4cfc1b6-3b0c-4bf2-a28a-d34d7e9c9a52 [ 1759.669517] env[62525]: INFO nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating resource usage from migration d5a5cb92-367a-4848-a0d1-056710d84bb4 [ 1759.669808] env[62525]: INFO nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating resource usage from migration 0b843855-0a70-4bb1-89c8-136594ac87b9 [ 1759.698604] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.698604] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 82443424-6071-44b3-bd9a-f92a1a650f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cb043ab8-dff7-48c6-b50b-a4d77a01eb41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699677] env[62525]: WARNING nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 80fbfbda-07fb-43ab-be74-3cbdaf890a55 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 462bc19d-1eaa-4c57-8ebb-412a97614f03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 130a3015-6caf-4374-a35f-9dd49bb8b3bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Migration f4cfc1b6-3b0c-4bf2-a28a-d34d7e9c9a52 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c2baf40b-ea57-4552-8d56-45bcd49280ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699677] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 50ee564d-7b27-4bc4-a95e-7717de865cfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699909] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Migration d5a5cb92-367a-4848-a0d1-056710d84bb4 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1759.699909] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6e9051e9-aa89-408f-8f62-533085dc1312 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.699909] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0f401a95-7b62-4940-a819-d0d69fc4a59a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.700066] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Migration 0b843855-0a70-4bb1-89c8-136594ac87b9 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1759.700123] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 6624506c-56ad-41f4-8d90-ed34ccfb9385 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.709862] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d10e5235-ee82-4412-8a1d-055971eb4649 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.131s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.776692] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T00:16:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ebf24d7-e7f0-4555-bbf8-7b4230bb9b33',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2073680030',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1759.776692] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1759.776692] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1759.776692] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1759.777013] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1759.777013] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1759.777077] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1759.777232] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1759.777398] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1759.777557] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1759.777725] env[62525]: DEBUG nova.virt.hardware [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1759.783324] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfiguring VM instance instance-0000005f to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1759.785745] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63cdd7f6-f9f3-4f43-b45c-0a55147b8d0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.808303] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1759.808303] env[62525]: value = "task-1781954" [ 1759.808303] env[62525]: _type = "Task" [ 1759.808303] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.819020] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781954, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.843625] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.916328] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56c1514a-17c1-4bc1-9858-e39c3770e5d5 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.254s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.180295] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a8c859c8-6e40-4f5f-a0ec-fde9ed2ab0db tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.255s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.204104] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 4822fcae-9ffa-40fb-9870-2359cdd6b04d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1760.204370] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1760.204519] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1760.306119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.306119] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.322197] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781954, 'name': ReconfigVM_Task, 'duration_secs': 0.19125} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.322197] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfigured VM instance instance-0000005f to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1760.322792] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5976027-3011-4d1b-bc0a-e54149acc822 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.357328] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1760.358851] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.359153] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.359594] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "82443424-6071-44b3-bd9a-f92a1a650f27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.359662] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.359905] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.365224] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5f37d94-f5ee-453d-9f2f-2d4273867113 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.380104] env[62525]: INFO nova.compute.manager [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Terminating instance [ 1760.382396] env[62525]: DEBUG nova.compute.manager [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1760.382606] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1760.383965] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3193c4e-bec7-45cb-b70f-85cecc60fd4a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.389261] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1760.389261] env[62525]: value = "task-1781955" [ 1760.389261] env[62525]: _type = "Task" [ 1760.389261] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.404123] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1760.405212] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02a08545-2179-44e0-ad1d-a992923a1abe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.412699] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781955, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.415903] env[62525]: DEBUG oslo_vmware.api [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1760.415903] env[62525]: value = "task-1781956" [ 1760.415903] env[62525]: _type = "Task" [ 1760.415903] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.429210] env[62525]: DEBUG oslo_vmware.api [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781956, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.447331] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.447638] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.447941] env[62525]: DEBUG nova.compute.manager [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Going to confirm migration 3 {{(pid=62525) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1760.535371] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d83392-0b8f-4fdb-b957-0cc99325fbc7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.546689] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa2e405-9764-4630-a07c-42b32ac329d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.581644] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c32a70-8c79-4baa-9083-49aa7bdf297d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.590131] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2235667-4616-43b2-bb77-34bfb9d49a37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.605307] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1760.816707] env[62525]: INFO nova.compute.manager [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Detaching volume b11ed6d8-04b4-43d5-90b7-a24844041af1 [ 1760.852091] env[62525]: INFO nova.virt.block_device [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 
tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Attempting to driver detach volume b11ed6d8-04b4-43d5-90b7-a24844041af1 from mountpoint /dev/sdc [ 1760.852376] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1760.852567] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369823', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'name': 'volume-b11ed6d8-04b4-43d5-90b7-a24844041af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'serial': 'b11ed6d8-04b4-43d5-90b7-a24844041af1'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1760.853522] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86dbcd3b-e2ae-4a2c-b2bf-ae8598f4e0c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.877351] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da940cb-107a-4ad1-ac8c-765354b36f44 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.885249] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb249c5-49cd-488f-8a23-300d08cc1459 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.909888] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d96acfc-c02c-457b-9231-9aa7170e56d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.936063] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] The volume has not been displaced from its original location: [datastore1] volume-b11ed6d8-04b4-43d5-90b7-a24844041af1/volume-b11ed6d8-04b4-43d5-90b7-a24844041af1.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1760.941593] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfiguring VM instance instance-00000057 to detach disk 2002 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1760.942033] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781955, 'name': ReconfigVM_Task, 'duration_secs': 0.331267} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.943580] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a333731a-b7cb-4d61-a8aa-57fd8415a1d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.957043] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1760.957315] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance '6624506c-56ad-41f4-8d90-ed34ccfb9385' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1760.967064] env[62525]: DEBUG oslo_vmware.api [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781956, 'name': PowerOffVM_Task, 'duration_secs': 0.285675} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.969925] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1760.970153] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1760.970404] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5925249a-b95c-4f0e-a21d-6980ae1e4436 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.975937] env[62525]: DEBUG oslo_vmware.api [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1760.975937] env[62525]: value = "task-1781957" [ 1760.975937] env[62525]: _type = "Task" [ 1760.975937] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.984281] env[62525]: DEBUG oslo_vmware.api [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781957, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.051856] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.052136] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.052330] env[62525]: DEBUG nova.network.neutron [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1761.052519] env[62525]: DEBUG nova.objects.instance [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'info_cache' on Instance uuid 6e9051e9-aa89-408f-8f62-533085dc1312 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.055134] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1761.055134] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1761.055279] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleting the datastore file [datastore1] 82443424-6071-44b3-bd9a-f92a1a650f27 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1761.055681] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8598ffb6-5863-4e73-8a49-fae20d730eb4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.062563] env[62525]: DEBUG oslo_vmware.api [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1761.062563] env[62525]: value = "task-1781959" [ 1761.062563] env[62525]: _type = "Task" [ 1761.062563] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.071769] env[62525]: DEBUG oslo_vmware.api [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781959, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.108266] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1761.420047] env[62525]: INFO nova.compute.manager [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Rebuilding instance [ 1761.466902] env[62525]: DEBUG nova.compute.manager [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1761.468128] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5d353e-f317-4126-a3d4-c37503e0132f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.472697] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe470af9-24f2-4978-9a18-9ddd2c7da8c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.496946] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0339453e-b3c0-4d1c-8bef-fa11ed4dec42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.502716] env[62525]: DEBUG oslo_vmware.api [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781957, 'name': ReconfigVM_Task, 'duration_secs': 0.494818} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.503294] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Reconfigured VM instance instance-00000057 to detach disk 2002 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1761.522421] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce5ddc23-1867-4de8-b941-e26d443a66d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.532672] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance '6624506c-56ad-41f4-8d90-ed34ccfb9385' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1761.543398] env[62525]: DEBUG oslo_vmware.api [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1761.543398] env[62525]: value = "task-1781960" [ 1761.543398] env[62525]: _type = "Task" [ 1761.543398] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.551715] env[62525]: DEBUG oslo_vmware.api [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781960, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.573424] env[62525]: DEBUG oslo_vmware.api [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.612787] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1761.612986] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.961s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.613564] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.580s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.004071] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1762.004397] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9877866-a906-4681-a8f7-8d65ad86eecb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.012129] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1762.012129] env[62525]: value = "task-1781961" [ 1762.012129] env[62525]: _type = "Task" [ 1762.012129] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.020644] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781961, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.052794] env[62525]: DEBUG oslo_vmware.api [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781960, 'name': ReconfigVM_Task, 'duration_secs': 0.230521} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.053210] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369823', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'name': 'volume-b11ed6d8-04b4-43d5-90b7-a24844041af1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c96a9ef9-0ef7-41a2-bb0f-531f82980eb8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b11ed6d8-04b4-43d5-90b7-a24844041af1', 'serial': 'b11ed6d8-04b4-43d5-90b7-a24844041af1'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1762.072793] env[62525]: DEBUG oslo_vmware.api [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1781959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.645738} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.073058] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1762.073246] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1762.073420] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1762.073590] env[62525]: INFO nova.compute.manager [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1762.073820] env[62525]: DEBUG oslo.service.loopingcall [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1762.074011] env[62525]: DEBUG nova.compute.manager [-] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1762.074099] env[62525]: DEBUG nova.network.neutron [-] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1762.109083] env[62525]: DEBUG nova.network.neutron [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Port bdc798dc-53dc-400d-aff6-c49ee2c1f4fb binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1762.347015] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f237f97-8cf2-44d4-b29a-4c6e1141290f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.353414] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0962e61b-814f-47c7-9983-203056af1ae9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.388156] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc11cac0-b261-4520-8a7b-30b7f2c78cfb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.395764] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8274d119-af78-48dd-8519-60f66f76129c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.409561] env[62525]: DEBUG nova.compute.provider_tree [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.454773] env[62525]: DEBUG nova.network.neutron [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [{"id": "7729ee20-ba8a-4607-95dd-4f5418171e89", "address": "fa:16:3e:30:d7:58", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7729ee20-ba", "ovs_interfaceid": "7729ee20-ba8a-4607-95dd-4f5418171e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.521443] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781961, 'name': PowerOffVM_Task, 'duration_secs': 0.240121} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.525021] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.525021] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1762.525021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af01c5fa-33b9-4eab-b2b2-22d1c969d8c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.530211] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.530868] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be8656cb-b453-4dee-912f-10bc57c487ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.595604] env[62525]: DEBUG nova.objects.instance [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lazy-loading 'flavor' on Instance uuid c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1762.616592] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1762.617035] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Deleting contents of the VM 
from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1762.617340] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleting the datastore file [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.617909] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91e3db12-577a-456e-a102-1b49f7dedc01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.624926] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1762.624926] env[62525]: value = "task-1781963" [ 1762.624926] env[62525]: _type = "Task" [ 1762.624926] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.632534] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781963, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.892667] env[62525]: DEBUG nova.compute.manager [req-e3e6b2e5-28c3-40eb-95e8-94cbf8a5d3c0 req-1cc1b4aa-ffe6-476d-b3a6-613310314d02 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Received event network-vif-deleted-fadf8b76-5c96-4ca1-a32f-c85a68c3fb21 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1762.892889] env[62525]: INFO nova.compute.manager [req-e3e6b2e5-28c3-40eb-95e8-94cbf8a5d3c0 req-1cc1b4aa-ffe6-476d-b3a6-613310314d02 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Neutron deleted interface fadf8b76-5c96-4ca1-a32f-c85a68c3fb21; detaching it from the instance and deleting it from the info cache [ 1762.893081] env[62525]: DEBUG nova.network.neutron [req-e3e6b2e5-28c3-40eb-95e8-94cbf8a5d3c0 req-1cc1b4aa-ffe6-476d-b3a6-613310314d02 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.912907] env[62525]: DEBUG nova.scheduler.client.report [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1762.959082] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-6e9051e9-aa89-408f-8f62-533085dc1312" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.960257] env[62525]: DEBUG nova.objects.instance [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'migration_context' on Instance uuid 6e9051e9-aa89-408f-8f62-533085dc1312 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.139853] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.140457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.140767] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.148763] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228994} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.149414] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.149739] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.150065] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.365715] env[62525]: DEBUG nova.network.neutron [-] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.396112] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-033f211a-a43a-4253-ae24-c052c279da90 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.406027] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9cfddc-4eb4-47ce-89cf-b798b424cd27 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.436603] env[62525]: DEBUG nova.compute.manager [req-e3e6b2e5-28c3-40eb-95e8-94cbf8a5d3c0 req-1cc1b4aa-ffe6-476d-b3a6-613310314d02 service nova] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Detach interface failed, port_id=fadf8b76-5c96-4ca1-a32f-c85a68c3fb21, reason: Instance 82443424-6071-44b3-bd9a-f92a1a650f27 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1763.462645] env[62525]: DEBUG nova.objects.base [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Object Instance<6e9051e9-aa89-408f-8f62-533085dc1312> lazy-loaded attributes: info_cache,migration_context {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1763.463657] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d519de6-ded4-4900-bc56-d2a18cea39ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.485219] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79364cc9-c661-4e90-a363-d6e71feb19c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.490915] env[62525]: DEBUG oslo_vmware.api [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1763.490915] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524a2317-a7a4-ffb8-6639-b11a21f59cf4" [ 1763.490915] env[62525]: _type = "Task" [ 1763.490915] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.499428] env[62525]: DEBUG oslo_vmware.api [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524a2317-a7a4-ffb8-6639-b11a21f59cf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.602247] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a3093742-c865-49df-b2da-f0453df8d796 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.296s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.870162] env[62525]: INFO nova.compute.manager [-] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Took 1.80 seconds to deallocate network for instance. 
[ 1763.924030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.310s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.926997] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.298s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.927263] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.929514] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.086s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.931183] env[62525]: INFO nova.compute.claims [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1763.955037] env[62525]: INFO nova.scheduler.client.report [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted allocations for instance 80fbfbda-07fb-43ab-be74-3cbdaf890a55 [ 1764.001803] env[62525]: DEBUG oslo_vmware.api [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524a2317-a7a4-ffb8-6639-b11a21f59cf4, 'name': SearchDatastore_Task, 'duration_secs': 0.193183} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.002116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.185733] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1764.186089] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1764.186338] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1764.186574] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1764.186768] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1764.186928] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1764.187170] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1764.187386] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1764.187594] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1764.187795] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1764.188024] env[62525]: DEBUG nova.virt.hardware [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1764.189052] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24317323-4ee9-4d51-bf4c-7074dcc761e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.192875] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.193046] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.193220] env[62525]: DEBUG nova.network.neutron [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1764.201682] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7113c6f-4514-4f79-b24b-c517b7ceed2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.217457] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:b4:48', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7803d107-2456-41cd-ba7a-ba4c281f5848', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1764.224931] env[62525]: DEBUG oslo.service.loopingcall [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1764.225439] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1764.225878] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cba63ae1-1053-4299-a9c5-a0d017edee5a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.246059] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1764.246059] env[62525]: value = "task-1781964" [ 1764.246059] env[62525]: _type = "Task" [ 1764.246059] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.253288] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781964, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.379091] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.463042] env[62525]: DEBUG oslo_concurrency.lockutils [None req-588f1eb1-bde2-4a3f-8a34-7af2ca5f4b4b tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "80fbfbda-07fb-43ab-be74-3cbdaf890a55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.698s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.487021] env[62525]: INFO nova.scheduler.client.report [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted allocation for migration f4cfc1b6-3b0c-4bf2-a28a-d34d7e9c9a52 [ 1764.701374] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.701635] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.701857] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.702057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.702233] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.705270] env[62525]: INFO nova.compute.manager [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Terminating instance [ 1764.707335] env[62525]: DEBUG nova.compute.manager [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1764.707528] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1764.708549] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16f3332-0733-4d31-8122-59de94ef22c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.717089] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1764.717333] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f0bf2b7-4c40-4302-b7c8-ecfba107bf6b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.723847] env[62525]: DEBUG oslo_vmware.api [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1764.723847] env[62525]: value = "task-1781965" [ 1764.723847] env[62525]: _type = "Task" [ 1764.723847] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.733976] env[62525]: DEBUG oslo_vmware.api [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781965, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.754882] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781964, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.910723] env[62525]: DEBUG nova.network.neutron [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with network_info: [{"id": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "address": "fa:16:3e:d3:d5:d4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdc798dc-53", "ovs_interfaceid": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.993496] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.124s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.084761] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.085169] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.085500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.085806] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.086138] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.089211] env[62525]: INFO nova.compute.manager [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Terminating instance [ 1765.091548] env[62525]: DEBUG nova.compute.manager [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1765.091852] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1765.093121] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a817e5dc-75c7-47e7-b4bb-e54adb9d761b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.106459] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1765.106739] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e33b021d-b466-4db9-b43a-e6bda2ef3ab6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.116943] env[62525]: DEBUG oslo_vmware.api [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1765.116943] env[62525]: value = "task-1781966" [ 1765.116943] env[62525]: _type = "Task" [ 1765.116943] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.131733] env[62525]: DEBUG oslo_vmware.api [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781966, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.135410] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b559bb-77ff-447a-8668-8c37f90aa167 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.142587] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef82763-f646-4b90-b617-d4cebf31ee95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.172812] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0c7156-e271-47ef-bad8-3be74de4d9da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.180241] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea36bb4-c817-4491-8d14-d1d37a0f4f80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.195170] env[62525]: DEBUG nova.compute.provider_tree [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1765.233696] env[62525]: DEBUG oslo_vmware.api [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781965, 'name': PowerOffVM_Task, 'duration_secs': 0.195013} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.233989] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1765.234177] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1765.234437] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c44c3118-48cb-451e-bc6d-5f659c78c5a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.255825] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781964, 'name': CreateVM_Task, 'duration_secs': 0.554193} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.256066] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1765.256789] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.256949] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.257303] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1765.257560] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b391da65-41cf-4a99-ba64-2ee46bb7cdf9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.263072] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1765.263072] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5267a289-a727-d8cc-d4f2-adf3b850192c" [ 1765.263072] env[62525]: _type = "Task" [ 1765.263072] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.271860] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5267a289-a727-d8cc-d4f2-adf3b850192c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.316039] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1765.316287] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1765.316481] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Deleting the datastore file [datastore1] c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1765.316753] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9cd01b5-0208-4aad-8804-ec199b0e57c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.324415] env[62525]: DEBUG oslo_vmware.api [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for the task: (returnval){ [ 1765.324415] env[62525]: value = "task-1781968" [ 1765.324415] env[62525]: _type = "Task" [ 1765.324415] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.332280] env[62525]: DEBUG oslo_vmware.api [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781968, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.414657] env[62525]: DEBUG oslo_concurrency.lockutils [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.627463] env[62525]: DEBUG oslo_vmware.api [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781966, 'name': PowerOffVM_Task, 'duration_secs': 0.216231} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.627724] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1765.627892] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1765.628191] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfd4758a-22f3-4564-b677-250a493634b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.699122] env[62525]: DEBUG nova.scheduler.client.report [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1765.703662] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1765.703873] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1765.704065] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleting the datastore file [datastore1] c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1765.704536] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b94b56e7-06a9-4352-b645-bacb57ec3a5c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.710576] env[62525]: DEBUG oslo_vmware.api [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for the task: (returnval){ [ 1765.710576] env[62525]: value = "task-1781970" [ 1765.710576] env[62525]: _type = 
"Task" [ 1765.710576] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.719449] env[62525]: DEBUG oslo_vmware.api [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.773860] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5267a289-a727-d8cc-d4f2-adf3b850192c, 'name': SearchDatastore_Task, 'duration_secs': 0.00959} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.774144] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.774400] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1765.774636] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.774783] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.774964] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1765.775244] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6665ec7-c808-4106-91ee-62a6cfadc2aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.783713] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1765.783895] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1765.784747] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c92f16c8-1ede-479c-a08d-d0068b8eea19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.790885] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1765.790885] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521a1cce-4732-7cd0-28cb-8f2ecc8c685c" [ 1765.790885] env[62525]: _type = "Task" [ 1765.790885] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.798415] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a1cce-4732-7cd0-28cb-8f2ecc8c685c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.833726] env[62525]: DEBUG oslo_vmware.api [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Task: {'id': task-1781968, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141123} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.834042] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1765.834296] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1765.834508] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1765.834714] env[62525]: INFO nova.compute.manager [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1765.834994] env[62525]: DEBUG oslo.service.loopingcall [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1765.835245] env[62525]: DEBUG nova.compute.manager [-] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1765.835592] env[62525]: DEBUG nova.network.neutron [-] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1765.937303] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f53b8df-49fa-4d83-9e80-306dc32cd133 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.962059] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d61059b-b09a-493e-b75c-6ac9c5285bb3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.969976] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance '6624506c-56ad-41f4-8d90-ed34ccfb9385' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1766.206040] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.276s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.206680] env[62525]: DEBUG nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1766.209591] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.207s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.221371] env[62525]: DEBUG oslo_vmware.api [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Task: {'id': task-1781970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184336} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.222125] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1766.222360] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1766.222545] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1766.222721] env[62525]: INFO nova.compute.manager [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1766.223309] env[62525]: DEBUG oslo.service.loopingcall [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1766.223725] env[62525]: DEBUG nova.compute.manager [-] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1766.223823] env[62525]: DEBUG nova.network.neutron [-] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1766.301811] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a1cce-4732-7cd0-28cb-8f2ecc8c685c, 'name': SearchDatastore_Task, 'duration_secs': 0.008927} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.302638] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-126423c9-da86-4ee0-872d-d23f47605a03 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.308370] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1766.308370] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5219c29b-a1bb-085d-9812-1e7799b358f6" [ 1766.308370] env[62525]: _type = "Task" [ 1766.308370] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.317189] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5219c29b-a1bb-085d-9812-1e7799b358f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.360837] env[62525]: DEBUG nova.compute.manager [req-a174fc8d-3831-4805-9cb9-3337b9147f2d req-8499704e-ddef-4ab5-976f-6f8c177657ac service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Received event network-vif-deleted-0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1766.361056] env[62525]: INFO nova.compute.manager [req-a174fc8d-3831-4805-9cb9-3337b9147f2d req-8499704e-ddef-4ab5-976f-6f8c177657ac service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Neutron deleted interface 0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7; detaching it from the instance and deleting it from the info cache [ 1766.361246] env[62525]: DEBUG nova.network.neutron [req-a174fc8d-3831-4805-9cb9-3337b9147f2d req-8499704e-ddef-4ab5-976f-6f8c177657ac service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.401596] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.401853] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.402086] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.402283] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.402457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 
tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.405262] env[62525]: INFO nova.compute.manager [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Terminating instance [ 1766.407533] env[62525]: DEBUG nova.compute.manager [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1766.407737] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1766.408977] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ed5285-21ef-4407-8ad3-eab04af2ebf7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.417340] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1766.417796] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b18224c-8e1b-4340-98fd-3e39c13330a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.425202] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1766.425202] env[62525]: value = "task-1781971" [ 1766.425202] env[62525]: _type = "Task" [ 1766.425202] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.434392] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781971, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.476180] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1766.476555] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d65b4414-c3e6-4083-a2d1-0c18de698eec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.484710] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1766.484710] env[62525]: value = "task-1781972" [ 1766.484710] env[62525]: _type = "Task" [ 1766.484710] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.494433] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.713642] env[62525]: DEBUG nova.compute.utils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1766.718138] env[62525]: DEBUG nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1766.718343] env[62525]: DEBUG nova.network.neutron [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1766.760214] env[62525]: DEBUG nova.policy [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59a07073c7534d17bfb2013552bbe0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c56f465d1a641a99458904c04137621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1766.823843] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5219c29b-a1bb-085d-9812-1e7799b358f6, 'name': SearchDatastore_Task, 'duration_secs': 0.037593} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.824223] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.824516] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1766.824817] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0e46bf4-f2ac-4107-97eb-0b95b439722a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.832108] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1766.832108] env[62525]: value = "task-1781973" [ 1766.832108] env[62525]: _type = "Task" [ 1766.832108] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.838253] env[62525]: DEBUG nova.network.neutron [-] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.843121] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781973, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.864347] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5c14637-abab-41a2-8915-f8ba6711d98d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.877343] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e2a50a-bdb7-4243-aaf8-25a301a77c37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.916313] env[62525]: DEBUG nova.compute.manager [req-a174fc8d-3831-4805-9cb9-3337b9147f2d req-8499704e-ddef-4ab5-976f-6f8c177657ac service nova] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Detach interface failed, port_id=0ad012c1-5e8b-43d0-871a-1d1bf8bf29a7, reason: Instance c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1766.936288] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781971, 'name': PowerOffVM_Task, 'duration_secs': 0.302055} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.939978] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1766.940348] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1766.940943] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4fe9808-c098-4ff4-b0fa-2329ab84c23a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.975702] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222a7f3f-be5c-432d-956b-7470c0898708 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.983888] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442bfd86-5e70-4974-9b0e-b8c968df1e1f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.996874] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781972, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.034043] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9a5d86-6a0d-4ac4-b484-f0a390c2cbd1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.037893] env[62525]: DEBUG nova.network.neutron [-] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.045798] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c4e09c-1af3-46fb-ba2a-db5f2e0e6229 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.063066] env[62525]: DEBUG nova.compute.provider_tree [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1767.125908] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1767.126466] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1767.127224] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleting the datastore file [datastore1] c2baf40b-ea57-4552-8d56-45bcd49280ec {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1767.127353] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc6701dd-b3f3-406d-bb59-768498b2d983 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.132224] env[62525]: DEBUG nova.network.neutron [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Successfully created port: f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1767.140533] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 
tempest-DeleteServersTestJSON-956550704-project-member] Waiting for the task: (returnval){ [ 1767.140533] env[62525]: value = "task-1781975" [ 1767.140533] env[62525]: _type = "Task" [ 1767.140533] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.150379] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781975, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.218837] env[62525]: DEBUG nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1767.344027] env[62525]: INFO nova.compute.manager [-] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Took 1.51 seconds to deallocate network for instance. [ 1767.344027] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781973, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.496826] env[62525]: DEBUG oslo_vmware.api [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1781972, 'name': PowerOnVM_Task, 'duration_secs': 0.53161} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.498104] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1767.498104] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-22332a5a-d143-466a-8d09-7428b3a0e05f tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance '6624506c-56ad-41f4-8d90-ed34ccfb9385' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1767.540800] env[62525]: INFO nova.compute.manager [-] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Took 1.32 seconds to deallocate network for instance. 
[ 1767.585375] env[62525]: ERROR nova.scheduler.client.report [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [req-fb9b7521-37d5-4be6-8acf-971b68355f7c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fb9b7521-37d5-4be6-8acf-971b68355f7c"}]} [ 1767.603491] env[62525]: DEBUG nova.scheduler.client.report [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1767.619605] env[62525]: DEBUG nova.scheduler.client.report [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1767.619841] env[62525]: DEBUG nova.compute.provider_tree [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1767.635504] env[62525]: DEBUG nova.scheduler.client.report [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1767.651109] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781975, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.654763] env[62525]: DEBUG nova.scheduler.client.report [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1767.842435] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.985818} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.845080] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1767.845392] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1767.845844] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95f872e0-ff19-49eb-b87f-9ae97ddff972 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.851037] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.852515] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1767.852515] env[62525]: value = "task-1781976" [ 1767.852515] env[62525]: _type = "Task" [ 1767.852515] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.864940] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781976, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.869644] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6271df59-091b-4c95-8b57-82fa3d1ef94a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.876677] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13907172-ac38-4053-b386-4928730c89a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.908097] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043bd2b0-99f3-4dab-b3c9-d823c27866cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.916049] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331888ab-643c-4ad9-a86a-c59fbe02d388 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.930382] env[62525]: DEBUG nova.compute.provider_tree [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1768.049097] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.151058] env[62525]: DEBUG oslo_vmware.api [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Task: {'id': task-1781975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.001458} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.151338] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1768.151527] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1768.151702] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1768.151874] env[62525]: INFO nova.compute.manager [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1768.152222] env[62525]: DEBUG oslo.service.loopingcall [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1768.152310] env[62525]: DEBUG nova.compute.manager [-] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1768.152403] env[62525]: DEBUG nova.network.neutron [-] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1768.233227] env[62525]: DEBUG nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1768.258958] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1768.259337] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1768.259552] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1768.259751] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1768.260099] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1768.260099] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1768.260360] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1768.260571] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1768.260943] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 
tempest-ServersTestJSON-1950281889-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1768.260943] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1768.261104] env[62525]: DEBUG nova.virt.hardware [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1768.261964] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a373937-a21e-491e-a7a1-f5e8c54fc973 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.271248] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a3e513-6450-46a2-8a07-84759c4965fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.363380] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075022} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.363679] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1768.364505] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652ff580-d2a3-434d-8c44-5db6cef0791a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.387761] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1768.388144] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74067916-6326-46e9-82a7-deeeb2b7382f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.405056] env[62525]: DEBUG nova.compute.manager [req-cc713891-3e55-474e-8db1-2703dd066623 req-6780289d-7c1b-4746-801d-50d0e1b9c814 service nova] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Received event network-vif-deleted-c4d4ce3d-0244-4d4e-9fc7-de0583a3c74c {{(pid=62525) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1768.411671] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1768.411671] env[62525]: value = "task-1781977" [ 1768.411671] env[62525]: _type = "Task" [ 1768.411671] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.421638] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781977, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.465020] env[62525]: DEBUG nova.scheduler.client.report [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 128 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1768.465020] env[62525]: DEBUG nova.compute.provider_tree [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 128 to 129 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1768.465020] env[62525]: DEBUG nova.compute.provider_tree [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1768.655624] env[62525]: DEBUG nova.network.neutron [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Successfully updated port: f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1768.922348] env[62525]: DEBUG nova.network.neutron [-] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.923745] env[62525]: DEBUG oslo_vmware.api [None 
req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781977, 'name': ReconfigVM_Task, 'duration_secs': 0.310759} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.924207] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a/0f401a95-7b62-4940-a819-d0d69fc4a59a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1768.925220] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9a3612f-8ec3-49d0-a2ff-a0c566a6dc0f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.932459] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1768.932459] env[62525]: value = "task-1781978" [ 1768.932459] env[62525]: _type = "Task" [ 1768.932459] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.941514] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781978, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.158723] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "refresh_cache-4822fcae-9ffa-40fb-9870-2359cdd6b04d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.158875] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "refresh_cache-4822fcae-9ffa-40fb-9870-2359cdd6b04d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.158950] env[62525]: DEBUG nova.network.neutron [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1769.425867] env[62525]: INFO nova.compute.manager [-] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Took 1.27 seconds to deallocate network for instance. [ 1769.442174] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781978, 'name': Rename_Task, 'duration_secs': 0.139263} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.442462] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1769.442708] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-448d5871-eae0-4784-a8e0-2d76b83c1bab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.449691] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1769.449691] env[62525]: value = "task-1781979" [ 1769.449691] env[62525]: _type = "Task" [ 1769.449691] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.457617] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.474463] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.265s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.477340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.098s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.477554] env[62525]: DEBUG nova.objects.instance [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'resources' on Instance uuid 82443424-6071-44b3-bd9a-f92a1a650f27 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.585970] env[62525]: DEBUG nova.network.neutron [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Port bdc798dc-53dc-400d-aff6-c49ee2c1f4fb binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1769.586279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.586438] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.586602] env[62525]: DEBUG nova.network.neutron [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1769.689889] env[62525]: DEBUG nova.network.neutron [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1769.816419] env[62525]: DEBUG nova.network.neutron [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Updating instance_info_cache with network_info: [{"id": "f4e58586-b2fa-4f5d-aaf3-4837b1f78a22", "address": "fa:16:3e:47:e5:e1", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4e58586-b2", "ovs_interfaceid": "f4e58586-b2fa-4f5d-aaf3-4837b1f78a22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.932900] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.959691] env[62525]: DEBUG oslo_vmware.api [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781979, 'name': PowerOnVM_Task, 'duration_secs': 0.483827} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.960463] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1769.960463] env[62525]: DEBUG nova.compute.manager [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1769.961536] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e05848-d4d8-4aa7-87fe-bc312c5e27e6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.032769] env[62525]: INFO nova.scheduler.client.report [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted allocation for migration d5a5cb92-367a-4848-a0d1-056710d84bb4 [ 1770.157167] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0be4d1-a1be-4317-9c99-32fbee9a5bbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.165841] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2375eaf-f300-4e31-80cf-21a457981fc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.200231] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7987213-8cf6-4bfc-82ef-ab5df9523bf3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.207960] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06920e9-0d4a-4618-a1d1-9e0e744e96c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.222146] env[62525]: DEBUG nova.compute.provider_tree [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1770.319440] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "refresh_cache-4822fcae-9ffa-40fb-9870-2359cdd6b04d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.319770] env[62525]: DEBUG nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Instance network_info: |[{"id": "f4e58586-b2fa-4f5d-aaf3-4837b1f78a22", "address": "fa:16:3e:47:e5:e1", 
"network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4e58586-b2", "ovs_interfaceid": "f4e58586-b2fa-4f5d-aaf3-4837b1f78a22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1770.320215] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:e5:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4e58586-b2fa-4f5d-aaf3-4837b1f78a22', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1770.327925] env[62525]: DEBUG oslo.service.loopingcall [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1770.329154] env[62525]: DEBUG nova.network.neutron [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with network_info: [{"id": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "address": "fa:16:3e:d3:d5:d4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdc798dc-53", "ovs_interfaceid": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.330401] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1770.330620] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1288f93-3af7-450c-88e0-d7e5071eb620 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.353059] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1770.353059] env[62525]: value = "task-1781980" [ 1770.353059] env[62525]: _type = "Task" [ 1770.353059] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.361212] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781980, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.471675] env[62525]: DEBUG nova.compute.manager [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Received event network-vif-deleted-78deceee-4409-4d65-b4f0-dfc4e932c381 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1770.471675] env[62525]: DEBUG nova.compute.manager [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Received event network-vif-plugged-f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1770.471917] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] Acquiring lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.472140] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.472239] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.472411] env[62525]: DEBUG nova.compute.manager [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] No waiting events found dispatching network-vif-plugged-f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1770.472571] env[62525]: WARNING nova.compute.manager [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Received unexpected event network-vif-plugged-f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 for instance with vm_state building and task_state spawning. [ 1770.472745] env[62525]: DEBUG nova.compute.manager [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Received event network-changed-f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1770.472898] env[62525]: DEBUG nova.compute.manager [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Refreshing instance network info cache due to event network-changed-f4e58586-b2fa-4f5d-aaf3-4837b1f78a22. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1770.473111] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] Acquiring lock "refresh_cache-4822fcae-9ffa-40fb-9870-2359cdd6b04d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.473246] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] Acquired lock "refresh_cache-4822fcae-9ffa-40fb-9870-2359cdd6b04d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.473410] env[62525]: DEBUG nova.network.neutron [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Refreshing network info cache for port f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1770.484941] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.539271] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6a6edc50-a9d0-4bd0-90e2-4e036717a657 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.091s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.727374] env[62525]: DEBUG nova.scheduler.client.report [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1770.849023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.864726] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1781980, 'name': CreateVM_Task, 'duration_secs': 0.472563} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.864910] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1770.865572] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.865744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.866084] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1770.866341] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49ec6333-51df-4185-953a-a1fe682df88e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.870675] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1770.870675] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52dbcdad-d648-d0b3-b75d-059a7aa55bef" [ 1770.870675] env[62525]: _type = "Task" [ 1770.870675] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.878599] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dbcdad-d648-d0b3-b75d-059a7aa55bef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.152534] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.152832] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.153093] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.153315] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.153515] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.155756] env[62525]: INFO nova.compute.manager [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Terminating instance [ 1771.157644] env[62525]: DEBUG nova.compute.manager [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1771.157881] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1771.158779] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389e9687-2633-410f-af56-6b785413c5b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.169108] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1771.169382] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-028b4ae1-9121-4c91-ab6a-e301b7c1bfdc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.176392] env[62525]: DEBUG oslo_vmware.api [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1771.176392] env[62525]: value = "task-1781981" [ 1771.176392] env[62525]: _type = "Task" [ 1771.176392] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.177162] env[62525]: DEBUG nova.network.neutron [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Updated VIF entry in instance network info cache for port f4e58586-b2fa-4f5d-aaf3-4837b1f78a22. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1771.177531] env[62525]: DEBUG nova.network.neutron [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Updating instance_info_cache with network_info: [{"id": "f4e58586-b2fa-4f5d-aaf3-4837b1f78a22", "address": "fa:16:3e:47:e5:e1", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4e58586-b2", "ovs_interfaceid": "f4e58586-b2fa-4f5d-aaf3-4837b1f78a22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1771.187333] env[62525]: DEBUG oslo_vmware.api [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781981, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.232961] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.755s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.235558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.384s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.235866] env[62525]: DEBUG nova.objects.instance [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lazy-loading 'resources' on Instance uuid c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1771.253857] env[62525]: INFO nova.scheduler.client.report [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted allocations for instance 82443424-6071-44b3-bd9a-f92a1a650f27 [ 1771.352879] env[62525]: DEBUG nova.compute.manager [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62525) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1771.353197] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.385887] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52dbcdad-d648-d0b3-b75d-059a7aa55bef, 'name': SearchDatastore_Task, 'duration_secs': 0.009377} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.386921] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.387201] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1771.387482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.387672] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.387946] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1771.388575] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c9ca559-6703-44c6-9524-72c2e8cf308f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.416147] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1771.416345] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1771.417370] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20f26f05-f47b-4f15-a9c3-96d8d7ddc420 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.423140] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1771.423140] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b48d9d-fc48-37cf-f4e5-baf529de81cf" [ 1771.423140] env[62525]: _type = "Task" [ 1771.423140] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.432045] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b48d9d-fc48-37cf-f4e5-baf529de81cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.684812] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3d5c661-4eac-4383-9ea4-337e9f66ebc0 req-2fd486fe-5f7f-48d8-9740-9778230d66de service nova] Releasing lock "refresh_cache-4822fcae-9ffa-40fb-9870-2359cdd6b04d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.691703] env[62525]: DEBUG oslo_vmware.api [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781981, 'name': PowerOffVM_Task, 'duration_secs': 0.247127} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.691703] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1771.691703] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1771.692075] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-feaae33b-0205-44ab-93ff-d6cd60ea2299 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.763864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e99d244d-92fc-484d-aa6a-10ee9d3a8e46 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "82443424-6071-44b3-bd9a-f92a1a650f27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.405s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.837392] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1771.837624] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1771.837761] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleting the datastore file [datastore1] 6e9051e9-aa89-408f-8f62-533085dc1312 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1771.838052] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b90f93d9-7ad8-4689-b600-0a67579ee620 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.848224] env[62525]: DEBUG oslo_vmware.api [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1771.848224] env[62525]: value = "task-1781983" [ 1771.848224] env[62525]: _type = "Task" [ 1771.848224] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.856346] env[62525]: DEBUG oslo_vmware.api [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781983, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.879856] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "0f401a95-7b62-4940-a819-d0d69fc4a59a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.880131] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.880340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "0f401a95-7b62-4940-a819-d0d69fc4a59a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.880580] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.880774] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.882889] env[62525]: INFO nova.compute.manager [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Terminating instance [ 1771.887536] env[62525]: DEBUG nova.compute.manager [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1771.887536] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1771.888810] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ba63d3-ee67-4b75-8c73-c4a16503247b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.895900] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1771.898590] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdd9949d-0d9a-4dc3-b689-726dd7b2e246 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.906677] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1771.906677] env[62525]: value = "task-1781984" [ 1771.906677] env[62525]: _type = "Task" [ 1771.906677] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.915291] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781984, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.932699] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d89b191-8b3d-4605-a117-e3d05e5034c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.939402] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b48d9d-fc48-37cf-f4e5-baf529de81cf, 'name': SearchDatastore_Task, 'duration_secs': 0.008895} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.940702] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2232b285-6763-4da9-b20e-c2f1418f9794 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.948346] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d907e7f-ba3b-4ab8-a687-e1976e746a83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.953332] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1771.953332] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f15b35-e056-8075-a37b-7f76e5bf3040" [ 1771.953332] env[62525]: _type = "Task" [ 1771.953332] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.984323] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209e30d5-45ab-439a-9910-17a8137f5191 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.990481] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f15b35-e056-8075-a37b-7f76e5bf3040, 'name': SearchDatastore_Task, 'duration_secs': 0.010538} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.991206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.991534] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 4822fcae-9ffa-40fb-9870-2359cdd6b04d/4822fcae-9ffa-40fb-9870-2359cdd6b04d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1771.991877] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63746ba3-1f52-48ef-812f-9bfd1a00b633 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.997334] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83c764f-32f7-4658-b523-f05dd8c8adbd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.002175] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1772.002175] env[62525]: value = "task-1781985" [ 1772.002175] env[62525]: _type = "Task" [ 1772.002175] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.013332] env[62525]: DEBUG nova.compute.provider_tree [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.019342] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.345904] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.346308] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.359383] env[62525]: DEBUG oslo_vmware.api [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1781983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241062} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.360209] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1772.360397] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1772.360583] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1772.360788] env[62525]: INFO nova.compute.manager [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1772.361244] env[62525]: DEBUG oslo.service.loopingcall [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.361333] env[62525]: DEBUG nova.compute.manager [-] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1772.361370] env[62525]: DEBUG nova.network.neutron [-] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1772.423145] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781984, 'name': PowerOffVM_Task, 'duration_secs': 0.208476} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.423145] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1772.423145] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1772.423145] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4644cae3-11be-4417-b511-7af5ab9144ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.514718] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.517500] env[62525]: DEBUG nova.scheduler.client.report [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1772.693024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.693024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.693024] env[62525]: INFO nova.compute.manager [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Shelving [ 1772.849658] env[62525]: DEBUG nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1772.868050] env[62525]: DEBUG nova.compute.manager [req-874e0544-bcf0-4308-846c-2b08267f902a req-653c57b2-a335-42d4-9d8a-36084c4f0bf5 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Received event network-vif-deleted-7729ee20-ba8a-4607-95dd-4f5418171e89 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1772.868050] env[62525]: INFO nova.compute.manager [req-874e0544-bcf0-4308-846c-2b08267f902a req-653c57b2-a335-42d4-9d8a-36084c4f0bf5 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Neutron deleted interface 7729ee20-ba8a-4607-95dd-4f5418171e89; detaching it from the instance and deleting it from the info cache [ 1772.868050] env[62525]: DEBUG nova.network.neutron [req-874e0544-bcf0-4308-846c-2b08267f902a req-653c57b2-a335-42d4-9d8a-36084c4f0bf5 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.014380] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.021605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.786s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.024712] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.975s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.024712] env[62525]: DEBUG nova.objects.instance [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lazy-loading 'resources' on Instance uuid c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1773.045775] env[62525]: INFO nova.scheduler.client.report [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Deleted allocations for instance c96a9ef9-0ef7-41a2-bb0f-531f82980eb8 [ 1773.200096] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1773.200356] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-671f6a6e-8239-4b1e-bb34-23ab6eb9491a {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.207220] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1773.207220] env[62525]: value = "task-1781987" [ 1773.207220] env[62525]: _type = "Task" [ 1773.207220] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.215507] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781987, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.328135] env[62525]: DEBUG nova.network.neutron [-] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.366871] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1773.367211] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1773.367435] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleting the datastore file [datastore1] 0f401a95-7b62-4940-a819-d0d69fc4a59a {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1773.368293] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f19d7a4-9235-4634-8376-093edbce8e66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.370980] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32c91cf5-8f5e-415e-bc5b-7846193141f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.374290] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.377862] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1773.377862] env[62525]: value = "task-1781988" [ 
1773.377862] env[62525]: _type = "Task" [ 1773.377862] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.385673] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d242fed-fabc-45c5-ba4e-c16ae053cf69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.401500] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.421408] env[62525]: DEBUG nova.compute.manager [req-874e0544-bcf0-4308-846c-2b08267f902a req-653c57b2-a335-42d4-9d8a-36084c4f0bf5 service nova] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Detach interface failed, port_id=7729ee20-ba8a-4607-95dd-4f5418171e89, reason: Instance 6e9051e9-aa89-408f-8f62-533085dc1312 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1773.445971] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "160a67ea-5044-4597-9a61-82e05b8aa778" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.446932] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.514793] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.556979] env[62525]: DEBUG oslo_concurrency.lockutils [None req-43ca8758-92f0-4bf5-a173-dc555cda4685 tempest-AttachVolumeTestJSON-794728895 tempest-AttachVolumeTestJSON-794728895-project-member] Lock "c96a9ef9-0ef7-41a2-bb0f-531f82980eb8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.855s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.712226] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ead597-d4be-462a-b3f5-640e9998787c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.721279] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781987, 'name': PowerOffVM_Task, 'duration_secs': 0.234162} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.722207] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9c925c-82a3-4d4c-bcc4-da8d845e4b35 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.725271] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1773.725979] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18221eb3-8a32-4d82-a95f-c37291194d8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.768320] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722d51f8-611e-4cf3-bfe5-8c8da819a0f9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.771279] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3227a26d-bcbc-4d82-92ae-7ca7d3a7efea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.779743] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925e35d2-5f3d-4510-bdb3-93c9d0c91f32 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.795028] env[62525]: DEBUG nova.compute.provider_tree [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.831643] env[62525]: INFO nova.compute.manager [-] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Took 1.47 seconds to deallocate network for instance. 
[ 1773.887592] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.948613] env[62525]: DEBUG nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1774.014682] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.287992] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1774.288361] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-869b3dc8-4f25-4d53-b9f0-d821db120c36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.295997] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1774.295997] env[62525]: value = "task-1781989" [ 1774.295997] env[62525]: _type = "Task" [ 1774.295997] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.300366] env[62525]: DEBUG nova.scheduler.client.report [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1774.308918] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781989, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.338407] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.387474] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.471461] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.516707] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.806534] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.783s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.808484] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781989, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.808952] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.876s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.809169] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.810817] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.326s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.811049] env[62525]: DEBUG nova.objects.instance [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1774.829943] env[62525]: INFO nova.scheduler.client.report [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Deleted allocations for instance c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4 [ 1774.831607] env[62525]: INFO nova.scheduler.client.report [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Deleted allocations for instance c2baf40b-ea57-4552-8d56-45bcd49280ec [ 1774.890336] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.021181] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.308220] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781989, 'name': CreateSnapshot_Task, 'duration_secs': 0.862422} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.308539] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1775.309288] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385691b0-7a5e-4524-9fba-59f87f5c8d4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.343088] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bef31c7c-5b41-4158-834e-9e0099b76801 tempest-AttachInterfacesTestJSON-1022572624 tempest-AttachInterfacesTestJSON-1022572624-project-member] Lock "c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.258s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.346099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-88dbf19e-323f-4807-9316-afc79dea2c94 tempest-DeleteServersTestJSON-956550704 tempest-DeleteServersTestJSON-956550704-project-member] Lock "c2baf40b-ea57-4552-8d56-45bcd49280ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.944s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.388245] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.517925] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781985, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.303663} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.518412] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 4822fcae-9ffa-40fb-9870-2359cdd6b04d/4822fcae-9ffa-40fb-9870-2359cdd6b04d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1775.518412] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1775.518811] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-509222a2-78af-453e-9f36-55c33bcef9b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.525550] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1775.525550] env[62525]: value = "task-1781991" [ 1775.525550] env[62525]: _type = "Task" [ 1775.525550] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.533455] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781991, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.829496] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1775.830647] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4ae1b416-cc05-42c5-be79-1b2efcaa821c tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.831722] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4929466c-9f2a-48d6-8fad-fe6ae94514f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.835032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 4.482s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.842119] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1775.842119] env[62525]: value = "task-1781992" [ 1775.842119] env[62525]: _type = "Task" [ 1775.842119] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.853105] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781992, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.890157] env[62525]: DEBUG oslo_vmware.api [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1781988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.022774} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.890471] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1775.890666] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1775.890843] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1775.891024] env[62525]: INFO nova.compute.manager [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Took 4.00 seconds to destroy the instance on the hypervisor. [ 1775.891265] env[62525]: DEBUG oslo.service.loopingcall [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.891452] env[62525]: DEBUG nova.compute.manager [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1775.891550] env[62525]: DEBUG nova.network.neutron [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1776.036100] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.159323} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.036407] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1776.037210] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115518f0-8012-4ad7-965b-6ab7b7d1f54a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.061853] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 4822fcae-9ffa-40fb-9870-2359cdd6b04d/4822fcae-9ffa-40fb-9870-2359cdd6b04d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1776.062193] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-830a1004-e919-4853-a6ec-439386ca91a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.086727] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1776.086727] env[62525]: value = "task-1781993" [ 1776.086727] env[62525]: _type = "Task" [ 1776.086727] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.100127] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781993, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.329262] env[62525]: DEBUG nova.compute.manager [req-3cc6ad6e-6821-4bbc-a6d2-b57dbfa86187 req-ee60cf9c-fa57-4532-8b94-e16b64779e40 service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Received event network-vif-deleted-7803d107-2456-41cd-ba7a-ba4c281f5848 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1776.329476] env[62525]: INFO nova.compute.manager [req-3cc6ad6e-6821-4bbc-a6d2-b57dbfa86187 req-ee60cf9c-fa57-4532-8b94-e16b64779e40 service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Neutron deleted interface 7803d107-2456-41cd-ba7a-ba4c281f5848; detaching it from the instance and deleting it from the info cache [ 1776.329728] env[62525]: DEBUG nova.network.neutron [req-3cc6ad6e-6821-4bbc-a6d2-b57dbfa86187 req-ee60cf9c-fa57-4532-8b94-e16b64779e40 service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.340323] env[62525]: DEBUG nova.objects.instance [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lazy-loading 'migration_context' on Instance uuid 6624506c-56ad-41f4-8d90-ed34ccfb9385 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1776.354868] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781992, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.597368] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.805815] env[62525]: DEBUG nova.network.neutron [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.833461] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26244a68-1c45-46c2-9ebb-8ae3f34b33b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.843298] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8344a5-4e52-4149-a7bc-155ea84ff5f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.879296] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781992, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.894450] env[62525]: DEBUG nova.compute.manager [req-3cc6ad6e-6821-4bbc-a6d2-b57dbfa86187 req-ee60cf9c-fa57-4532-8b94-e16b64779e40 service nova] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Detach interface failed, port_id=7803d107-2456-41cd-ba7a-ba4c281f5848, reason: Instance 0f401a95-7b62-4940-a819-d0d69fc4a59a could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1777.027462] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b465066d-43bf-4c4c-a7ee-7e9d7941c1b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.035359] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff551028-68f1-4664-819f-617bd258339b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.070732] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277a522e-0570-49a6-813b-c1ddfdd377ce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.079384] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676127ca-f473-4ae2-b947-3ee16de8677e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.094108] env[62525]: DEBUG nova.compute.provider_tree [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1777.103627] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781993, 'name': ReconfigVM_Task, 'duration_secs': 0.695009} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.107020] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 4822fcae-9ffa-40fb-9870-2359cdd6b04d/4822fcae-9ffa-40fb-9870-2359cdd6b04d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1777.107020] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e56c552f-c960-4616-8b9f-8393cec2b688 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.111569] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1777.111569] env[62525]: value = "task-1781995" [ 1777.111569] env[62525]: _type = "Task" [ 1777.111569] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.120497] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781995, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.309087] env[62525]: INFO nova.compute.manager [-] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Took 1.42 seconds to deallocate network for instance. [ 1777.374384] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1781992, 'name': CloneVM_Task, 'duration_secs': 1.384087} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.374676] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Created linked-clone VM from snapshot [ 1777.375445] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027525ac-cea0-4504-9c77-2b011932d0a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.383552] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Uploading image 4bcdf024-5b6a-4101-8dee-23681d55ab37 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1777.417171] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1777.417171] env[62525]: value = "vm-369828" [ 1777.417171] env[62525]: _type = "VirtualMachine" [ 1777.417171] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1777.417639] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-abeb7855-6f80-4585-b555-df29f840c302 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.426377] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease: (returnval){ [ 1777.426377] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525b42c7-847f-fab6-0d6c-67a29e3b26e1" [ 1777.426377] env[62525]: _type = "HttpNfcLease" [ 1777.426377] env[62525]: } obtained for exporting VM: (result){ [ 1777.426377] env[62525]: value = "vm-369828" [ 1777.426377] env[62525]: _type = "VirtualMachine" [ 1777.426377] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1777.426687] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the lease: (returnval){ [ 1777.426687] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525b42c7-847f-fab6-0d6c-67a29e3b26e1" [ 1777.426687] env[62525]: _type = "HttpNfcLease" [ 1777.426687] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1777.433428] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1777.433428] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525b42c7-847f-fab6-0d6c-67a29e3b26e1" [ 1777.433428] env[62525]: _type = "HttpNfcLease" [ 1777.433428] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1777.599752] env[62525]: DEBUG nova.scheduler.client.report [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1777.622530] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781995, 'name': Rename_Task, 'duration_secs': 0.151696} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.623422] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1777.623703] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5d7f80f-2856-435d-9b6b-71f84b5b6078 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.630392] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1777.630392] env[62525]: value = "task-1781998" [ 1777.630392] env[62525]: _type = "Task" [ 1777.630392] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.638125] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781998, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.816875] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.936573] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1777.936573] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525b42c7-847f-fab6-0d6c-67a29e3b26e1" [ 1777.936573] env[62525]: _type = "HttpNfcLease" [ 1777.936573] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1777.936932] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1777.936932] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525b42c7-847f-fab6-0d6c-67a29e3b26e1" [ 1777.936932] env[62525]: _type = "HttpNfcLease" [ 1777.936932] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1777.938253] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb7e45e-245d-460c-8af0-34c476e4a181 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.945854] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee76e1-484b-484d-de02-aa3615ae69a4/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1777.946235] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee76e1-484b-484d-de02-aa3615ae69a4/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1778.045119] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0989fc85-f329-41ef-b5f2-eaad0edc3380 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.140633] env[62525]: DEBUG oslo_vmware.api [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781998, 'name': PowerOnVM_Task, 'duration_secs': 0.450425} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.140877] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1778.141096] env[62525]: INFO nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Took 9.91 seconds to spawn the instance on the hypervisor. [ 1778.141281] env[62525]: DEBUG nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1778.142047] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe89c7a-9c14-4ad0-a19e-2f78d884786a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.612856] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.778s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.625402] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.250s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.626739] env[62525]: INFO nova.compute.claims [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1778.665288] env[62525]: INFO nova.compute.manager [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Took 18.84 seconds to build instance. 
[ 1779.169041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e4670d7c-8e6b-4a6c-9d68-be8195710292 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.353s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.246942] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.247318] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.247628] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.247831] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.248050] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.250445] env[62525]: INFO nova.compute.manager [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Terminating instance [ 1779.252348] env[62525]: DEBUG nova.compute.manager [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1779.252579] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1779.253602] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4da8a59-c3cc-46b7-84a3-04c4f5536d62 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.261900] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1779.262172] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9645bc6f-e8ef-49af-8d36-22c35754672e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.271225] env[62525]: DEBUG oslo_vmware.api [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1779.271225] env[62525]: value = "task-1781999" [ 1779.271225] env[62525]: _type = "Task" [ 1779.271225] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.285848] env[62525]: DEBUG oslo_vmware.api [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.781949] env[62525]: DEBUG oslo_vmware.api [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1781999, 'name': PowerOffVM_Task, 'duration_secs': 0.187062} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.784823] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1779.785013] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1779.786499] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c67a2664-b8b5-438d-a6b5-3eadbd55c99d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.848440] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d08f73-cc18-4c77-91c1-bab62dd77c89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.855437] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465fa5d4-c870-4fe8-ad58-08551679628d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.895134] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e126025-4011-43ea-8c47-e00b28302859 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.897982] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1779.898297] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1779.898534] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleting the datastore file [datastore1] 4822fcae-9ffa-40fb-9870-2359cdd6b04d {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1779.898853] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc313184-fc74-4b87-af5b-af3b8074773c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.907433] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a978fd68-99b5-4fe7-a0d1-55bc757084c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.911771] env[62525]: DEBUG oslo_vmware.api [None 
req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1779.911771] env[62525]: value = "task-1782001" [ 1779.911771] env[62525]: _type = "Task" [ 1779.911771] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.923733] env[62525]: DEBUG nova.compute.provider_tree [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1779.930462] env[62525]: DEBUG oslo_vmware.api [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.178343] env[62525]: INFO nova.compute.manager [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Swapping old allocation on dict_keys(['bb89c0ac-8f56-43c6-9f73-fd897be63424']) held by migration 0b843855-0a70-4bb1-89c8-136594ac87b9 for instance [ 1780.218350] env[62525]: DEBUG nova.scheduler.client.report [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Overwriting current allocation {'allocations': {'bb89c0ac-8f56-43c6-9f73-fd897be63424': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 132}}, 'project_id': '45e20a581c76424a8f8c2c844f1e04f9', 'user_id': '0eb16caec01e491a9369f27194a2836a', 'consumer_generation': 1} on consumer 6624506c-56ad-41f4-8d90-ed34ccfb9385 {{(pid=62525) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1780.322852] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.323127] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquired lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.323228] env[62525]: DEBUG nova.network.neutron [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1780.422649] env[62525]: DEBUG oslo_vmware.api [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170105} 
completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.423108] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1780.423396] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1780.424052] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1780.424326] env[62525]: INFO nova.compute.manager [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1780.424648] env[62525]: DEBUG oslo.service.loopingcall [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1780.424894] env[62525]: DEBUG nova.compute.manager [-] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1780.425058] env[62525]: DEBUG nova.network.neutron [-] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1780.427459] env[62525]: DEBUG nova.scheduler.client.report [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.933776] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.934834] env[62525]: DEBUG nova.compute.manager [None 
req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1780.940497] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.602s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.940758] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.944268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.473s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.946628] env[62525]: INFO nova.compute.claims [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1780.991539] env[62525]: INFO nova.scheduler.client.report [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted allocations for instance 6e9051e9-aa89-408f-8f62-533085dc1312 [ 1781.225427] env[62525]: DEBUG nova.compute.manager [req-1cba5f28-c842-439f-a0be-9eb04def3018 req-97b6de9c-e770-4dd7-b55e-2c23ef637dd5 service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Received event network-vif-deleted-f4e58586-b2fa-4f5d-aaf3-4837b1f78a22 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.227189] env[62525]: INFO nova.compute.manager [req-1cba5f28-c842-439f-a0be-9eb04def3018 req-97b6de9c-e770-4dd7-b55e-2c23ef637dd5 service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Neutron deleted interface f4e58586-b2fa-4f5d-aaf3-4837b1f78a22; detaching it from the instance and deleting it from the info cache [ 1781.227189] env[62525]: DEBUG nova.network.neutron [req-1cba5f28-c842-439f-a0be-9eb04def3018 req-97b6de9c-e770-4dd7-b55e-2c23ef637dd5 service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.302123] env[62525]: DEBUG nova.network.neutron [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with network_info: [{"id": 
"bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "address": "fa:16:3e:d3:d5:d4", "network": {"id": "aef4cbfb-1eda-4176-9c39-21b13aded85a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.253", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "c6489820a95e4a7db91372ce766ff6d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdc798dc-53", "ovs_interfaceid": "bdc798dc-53dc-400d-aff6-c49ee2c1f4fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.444036] env[62525]: DEBUG nova.compute.utils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1781.447287] env[62525]: DEBUG nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1781.447530] env[62525]: DEBUG nova.network.neutron [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1781.505527] env[62525]: DEBUG nova.policy [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e50433248fb4eb088e90d25fcb67c7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f3d5c15d37145aa84818a2ad88f307f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1781.509042] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad031ae1-fcc0-4fb5-9f1c-8685682a18cf tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "6e9051e9-aa89-408f-8f62-533085dc1312" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.355s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.695140] env[62525]: DEBUG nova.network.neutron [-] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.731173] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da913514-16dd-4c03-8d6b-673d70815f69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.748132] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3332816-4a1e-4d7b-ad69-89160ef5f348 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.784351] env[62525]: DEBUG nova.compute.manager [req-1cba5f28-c842-439f-a0be-9eb04def3018 req-97b6de9c-e770-4dd7-b55e-2c23ef637dd5 service nova] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Detach interface failed, port_id=f4e58586-b2fa-4f5d-aaf3-4837b1f78a22, reason: Instance 4822fcae-9ffa-40fb-9870-2359cdd6b04d could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1781.804347] env[62525]: DEBUG oslo_concurrency.lockutils [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Releasing lock "refresh_cache-6624506c-56ad-41f4-8d90-ed34ccfb9385" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.804605] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1781.804901] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab1c0c29-d571-483f-83d0-d99f68c8cf4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.819135] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1781.819135] env[62525]: value = "task-1782002" [ 1781.819135] env[62525]: _type = "Task" [ 1781.819135] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.829646] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782002, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.877265] env[62525]: DEBUG nova.network.neutron [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Successfully created port: 6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1781.950694] env[62525]: DEBUG nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1782.169431] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e167d34c-5f8e-4d07-806b-1a7027a59a75 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.180401] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc18c35-5c4e-4bb2-a317-306a10742132 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.223619] env[62525]: INFO nova.compute.manager [-] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Took 1.80 seconds to deallocate network for instance. 
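
The entries above record the hypervisor-side teardown of instance 4822fcae-9ffa-40fb-9870-2359cdd6b04d: the VM is powered off and unregistered, FileManager.DeleteDatastoreFile_Task is invoked for its datastore directory, oslo_vmware polls task-1782001 ("progress is 0%" then "completed successfully", duration_secs 0.170105), and Nova then deallocates the instance network. A minimal sketch of that invoke-and-poll pattern against the public oslo.vmware API follows; the vCenter address, credentials, and the datacenter managed-object reference (dc_moref) are placeholders for illustration, not values taken from this log.

    # Sketch only: drive a vCenter task the way the log entries above do
    # (invoke a *_Task method, then block in wait_for_task while it is polled).
    from oslo_vmware import api

    # Placeholder connection details; the real session in this log was
    # created once by Nova's VMwareVCDriver at service startup.
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def delete_datastore_dir(session, ds_path, dc_moref):
        # FileManager.DeleteDatastoreFile_Task returns a Task managed object;
        # wait_for_task polls it (the "progress is 0%" / "completed
        # successfully" lines above) and raises if the task ends in error.
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  session.vim.service_content.fileManager,
                                  name=str(ds_path), datacenter=dc_moref)
        return session.wait_for_task(task)

    # Usage (dc_moref obtained elsewhere from the PropertyCollector):
    # delete_datastore_dir(session,
    #                      '[datastore1] 4822fcae-9ffa-40fb-9870-2359cdd6b04d',
    #                      dc_moref)
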
[ 1782.224642] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f920c1d8-794a-4b85-a633-bab6741330ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.239025] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dea7be-1dfb-4e8d-9625-32692991de22 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.254880] env[62525]: DEBUG nova.compute.provider_tree [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1782.329482] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782002, 'name': PowerOffVM_Task, 'duration_secs': 0.256817} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.329846] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1782.330725] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T00:16:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ebf24d7-e7f0-4555-bbf8-7b4230bb9b33',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2073680030',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1782.330864] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1782.331075] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1782.333857] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1782.339208] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8f7b68f-74ad-4c43-bdd7-062ef748622b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.356245] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1782.356245] env[62525]: value = "task-1782003" [ 1782.356245] env[62525]: _type = "Task" [ 1782.356245] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.365215] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782003, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.733167] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.757028] env[62525]: DEBUG nova.scheduler.client.report [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1782.867623] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782003, 'name': ReconfigVM_Task, 'duration_secs': 0.152744} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.869039] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfc74e1-5422-4c03-a32a-cdf7d86a1f21 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T00:16:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ebf24d7-e7f0-4555-bbf8-7b4230bb9b33',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2073680030',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware 
[None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1782.899312] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1782.900659] env[62525]: DEBUG nova.virt.hardware [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1782.900925] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab50aead-daa2-4a12-8163-21da725e0440 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.910138] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1782.910138] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529c31b3-fbcd-61c2-25cc-b7135525ce04" [ 1782.910138] env[62525]: _type = "Task" [ 1782.910138] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.919210] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529c31b3-fbcd-61c2-25cc-b7135525ce04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.963240] env[62525]: DEBUG nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1782.998708] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1782.998976] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1782.999153] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1782.999383] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1782.999470] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1782.999614] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1782.999833] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1782.999990] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1783.001609] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1783.001609] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1783.001609] env[62525]: DEBUG nova.virt.hardware [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1783.001609] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f050b635-715f-48d0-9ee9-059301098f39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.009853] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f783781-eef8-4876-81e0-b492598ca3e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.171240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.171240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.262158] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 
tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.262887] env[62525]: DEBUG nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1783.265609] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.449s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.265841] env[62525]: DEBUG nova.objects.instance [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'resources' on Instance uuid 0f401a95-7b62-4940-a819-d0d69fc4a59a {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1783.422510] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529c31b3-fbcd-61c2-25cc-b7135525ce04, 'name': SearchDatastore_Task, 'duration_secs': 0.011126} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.429944] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfiguring VM instance instance-0000005f to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1783.430753] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-163817b5-3c76-423d-beaa-0303c1263c91 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.451079] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1783.451079] env[62525]: value = "task-1782004" [ 1783.451079] env[62525]: _type = "Task" [ 1783.451079] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.460653] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782004, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.509958] env[62525]: DEBUG nova.compute.manager [req-5c5a77a6-110c-4759-a981-e198c65f2bf1 req-bba094a5-6664-406e-a589-4b7005ed45ca service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Received event network-vif-plugged-6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1783.510232] env[62525]: DEBUG oslo_concurrency.lockutils [req-5c5a77a6-110c-4759-a981-e198c65f2bf1 req-bba094a5-6664-406e-a589-4b7005ed45ca service nova] Acquiring lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.510445] env[62525]: DEBUG oslo_concurrency.lockutils [req-5c5a77a6-110c-4759-a981-e198c65f2bf1 req-bba094a5-6664-406e-a589-4b7005ed45ca service nova] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.510614] env[62525]: DEBUG oslo_concurrency.lockutils [req-5c5a77a6-110c-4759-a981-e198c65f2bf1 req-bba094a5-6664-406e-a589-4b7005ed45ca service nova] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.510783] env[62525]: DEBUG nova.compute.manager [req-5c5a77a6-110c-4759-a981-e198c65f2bf1 req-bba094a5-6664-406e-a589-4b7005ed45ca service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] No waiting events found dispatching network-vif-plugged-6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1783.510948] env[62525]: WARNING nova.compute.manager [req-5c5a77a6-110c-4759-a981-e198c65f2bf1 req-bba094a5-6664-406e-a589-4b7005ed45ca service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Received unexpected event network-vif-plugged-6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e for instance with vm_state building and task_state spawning. [ 1783.673486] env[62525]: DEBUG nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1783.724778] env[62525]: DEBUG nova.network.neutron [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Successfully updated port: 6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1783.771767] env[62525]: DEBUG nova.compute.utils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1783.780944] env[62525]: DEBUG nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1783.781168] env[62525]: DEBUG nova.network.neutron [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1783.820116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "9fcec068-4921-4a42-b948-6e61a44658ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.820665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "9fcec068-4921-4a42-b948-6e61a44658ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.859280] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.859481] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.866922] env[62525]: DEBUG nova.policy [None 
req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '391b41cf09fd42879d3f5cd3153c2045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a46df22dac6f473b8395f9302c3a4a75', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1783.892687] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "467c6af1-2961-4213-8f0c-fe7591d93b5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.892969] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.963478] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782004, 'name': ReconfigVM_Task, 'duration_secs': 0.210282} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.966978] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfigured VM instance instance-0000005f to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1783.968163] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65b7c1b-bd90-417f-b37d-80c728cc8c38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.994133] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1783.997904] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57a57513-90b3-432e-a1d8-e438bc32f6b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.020229] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1784.020229] env[62525]: value = "task-1782005" [ 1784.020229] env[62525]: _type = "Task" [ 1784.020229] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.032487] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782005, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.072410] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53f6f28-c181-464e-a114-3aac3f84fadb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.080241] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6a4ef7-551c-47fe-9c83-5b3402fb1228 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.115969] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f01f1c8-a9e4-4fad-b901-31f15044b9ad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.124708] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2400614e-7187-48bb-84b9-079f86cf4967 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.139247] env[62525]: DEBUG nova.compute.provider_tree [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1784.198846] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.228899] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.228899] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.228899] env[62525]: DEBUG nova.network.neutron [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.267620] env[62525]: DEBUG nova.network.neutron [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Successfully created port: ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1784.280567] 
env[62525]: DEBUG nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1784.323627] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1784.370132] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1784.396902] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1784.533422] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782005, 'name': ReconfigVM_Task, 'duration_secs': 0.278713} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.533422] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385/6624506c-56ad-41f4-8d90-ed34ccfb9385.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1784.534086] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b316909c-715c-4003-9bdb-56ac14830214 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.559028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6902a006-6970-4cf9-ade2-36cef4930ccb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.580566] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a734c0e-12aa-4619-a874-53f6fada4dca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.601085] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708e3bd9-7603-4580-95e6-e8b4c28ee49b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.608444] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1784.608731] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56eea152-8785-4654-87be-e5c78ddea3da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.615009] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1784.615009] env[62525]: value = "task-1782006" [ 1784.615009] env[62525]: _type = "Task" [ 1784.615009] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.622546] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.642759] env[62525]: DEBUG nova.scheduler.client.report [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1784.780839] env[62525]: DEBUG nova.network.neutron [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1784.852630] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.894109] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.918429] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.957827] env[62525]: DEBUG nova.network.neutron [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance_info_cache with network_info: [{"id": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "address": "fa:16:3e:44:65:d2", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c0d4e5d-c0", "ovs_interfaceid": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.125524] env[62525]: DEBUG oslo_vmware.api [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782006, 'name': PowerOnVM_Task, 'duration_secs': 0.392135} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.125814] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1785.147772] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.882s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.150595] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.417s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.150875] env[62525]: DEBUG nova.objects.instance [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'resources' on Instance uuid 4822fcae-9ffa-40fb-9870-2359cdd6b04d {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1785.176043] env[62525]: INFO nova.scheduler.client.report [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted allocations for instance 0f401a95-7b62-4940-a819-d0d69fc4a59a [ 1785.291867] env[62525]: DEBUG nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1785.329298] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1785.329546] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1785.329715] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1785.329890] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1785.330050] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1785.330203] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1785.330411] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1785.330568] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1785.330734] 
env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1785.330907] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1785.331458] env[62525]: DEBUG nova.virt.hardware [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1785.332038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d431ae-cfe2-4616-abe7-d6b6f8949077 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.342016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6028ab2e-e998-473e-a75c-fa362539d8ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.461581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.461920] env[62525]: DEBUG nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Instance network_info: |[{"id": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "address": "fa:16:3e:44:65:d2", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c0d4e5d-c0", "ovs_interfaceid": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1785.462358] env[62525]: DEBUG 
nova.virt.vmwareapi.vmops [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:65:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1785.470252] env[62525]: DEBUG oslo.service.loopingcall [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.470485] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1785.470718] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19cbfe9b-326e-4186-ab9f-94354218f325 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.491988] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1785.491988] env[62525]: value = "task-1782007" [ 1785.491988] env[62525]: _type = "Task" [ 1785.491988] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.500810] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782007, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.642172] env[62525]: DEBUG nova.compute.manager [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Received event network-changed-6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1785.642384] env[62525]: DEBUG nova.compute.manager [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Refreshing instance network info cache due to event network-changed-6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1785.642603] env[62525]: DEBUG oslo_concurrency.lockutils [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] Acquiring lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.642748] env[62525]: DEBUG oslo_concurrency.lockutils [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] Acquired lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.642913] env[62525]: DEBUG nova.network.neutron [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Refreshing network info cache for port 6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1785.684145] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cf72547e-b1b2-4be0-a59b-e954dbc2a3b8 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "0f401a95-7b62-4940-a819-d0d69fc4a59a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.804s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.843755] env[62525]: DEBUG nova.network.neutron [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Successfully updated port: ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1785.872931] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931aba13-1dfc-482a-b9b2-39bbbe764c16 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.882507] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d42913-8d8f-4f3f-b297-db894db75aae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.914192] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa98091d-3979-4d13-b30f-d9cca5d5d0c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.922904] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c73974f-c1bf-4575-805a-931970cece9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.941050] env[62525]: DEBUG nova.compute.provider_tree [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1786.003381] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782007, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.108115] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee76e1-484b-484d-de02-aa3615ae69a4/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1786.109086] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ff74ff-1d9c-43e9-89f8-1293e806fec5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.115067] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee76e1-484b-484d-de02-aa3615ae69a4/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1786.115237] env[62525]: ERROR oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee76e1-484b-484d-de02-aa3615ae69a4/disk-0.vmdk due to incomplete transfer. [ 1786.115444] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-85c88be0-1999-4174-afeb-1c0d35e777db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.121799] env[62525]: DEBUG oslo_vmware.rw_handles [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee76e1-484b-484d-de02-aa3615ae69a4/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1786.121991] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Uploaded image 4bcdf024-5b6a-4101-8dee-23681d55ab37 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1786.124194] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1786.124422] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3fac31db-438d-43c4-936d-94b6edc234ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.129451] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1786.129451] env[62525]: value = "task-1782008" [ 1786.129451] env[62525]: _type = "Task" [ 1786.129451] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.137967] env[62525]: INFO nova.compute.manager [None req-437d78f3-82a6-4bad-8cf1-0a8288369ea7 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance to original state: 'active' [ 1786.141111] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782008, 'name': Destroy_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.331178] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "69a1093a-95d7-4cbb-90bf-1a213470872a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.331423] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.346659] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.346798] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.346945] env[62525]: DEBUG nova.network.neutron [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1786.413933] env[62525]: DEBUG nova.network.neutron [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updated VIF entry in instance network info cache for port 6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1786.413933] env[62525]: DEBUG nova.network.neutron [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance_info_cache with network_info: [{"id": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "address": "fa:16:3e:44:65:d2", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c0d4e5d-c0", "ovs_interfaceid": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.444764] env[62525]: DEBUG nova.scheduler.client.report [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1786.503298] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782007, 'name': CreateVM_Task, 'duration_secs': 0.556892} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.503547] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1786.504197] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.504369] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.504695] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1786.504956] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8d2c98d-4c38-42be-9dd4-f373fcbf4c2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.510264] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1786.510264] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524d227b-d340-a012-2c81-1c300e086234" [ 1786.510264] env[62525]: _type = "Task" [ 1786.510264] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.518014] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524d227b-d340-a012-2c81-1c300e086234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.641067] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782008, 'name': Destroy_Task, 'duration_secs': 0.390466} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.641067] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Destroyed the VM [ 1786.641067] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1786.641067] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-af8b587f-bec7-4947-b186-d6b85b2b5a83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.650318] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1786.650318] env[62525]: value = "task-1782009" [ 1786.650318] env[62525]: _type = "Task" [ 1786.650318] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.658442] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782009, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.836196] env[62525]: DEBUG nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1786.880536] env[62525]: DEBUG nova.network.neutron [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1786.915514] env[62525]: DEBUG oslo_concurrency.lockutils [req-17298e5e-2a66-4819-a728-d874f111d74a req-b7af745d-17f4-47ef-99b8-8dfd9c100199 service nova] Releasing lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.948595] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.798s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.951855] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.753s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.956055] env[62525]: INFO nova.compute.claims [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1786.977899] env[62525]: INFO nova.scheduler.client.report [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted allocations for instance 4822fcae-9ffa-40fb-9870-2359cdd6b04d [ 1787.024532] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]524d227b-d340-a012-2c81-1c300e086234, 'name': SearchDatastore_Task, 'duration_secs': 0.012347} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.025090] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.025344] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.025571] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.025717] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.025900] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.026187] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38705c6e-23f4-484a-afee-d7210432d9a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.035485] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.035702] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.036488] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38f99375-a843-4b47-b6c6-d9f288c81964 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.044067] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1787.044067] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c51b06-ab50-2748-06b1-752adfff3e74" [ 1787.044067] env[62525]: _type = "Task" [ 1787.044067] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.052897] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c51b06-ab50-2748-06b1-752adfff3e74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.053786] env[62525]: DEBUG nova.network.neutron [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Updating instance_info_cache with network_info: [{"id": "ae436399-b552-425e-a202-0742904e7a48", "address": "fa:16:3e:49:f0:29", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae436399-b5", "ovs_interfaceid": "ae436399-b552-425e-a202-0742904e7a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.162681] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782009, 'name': RemoveSnapshot_Task, 'duration_secs': 0.359756} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.163191] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1787.163597] env[62525]: DEBUG nova.compute.manager [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1787.164335] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5581a5a-a77e-4291-9850-78fcbd7baadf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.355259] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.389794] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.389794] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.389794] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.389907] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.390429] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock 
"6624506c-56ad-41f4-8d90-ed34ccfb9385-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.392367] env[62525]: INFO nova.compute.manager [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Terminating instance [ 1787.394243] env[62525]: DEBUG nova.compute.manager [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1787.394447] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1787.395324] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddb6d0d-07c7-4ee9-bb8e-6c74691361db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.403437] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.403698] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e59469b-e57d-4a9d-855b-5cfaf2a50907 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.410296] env[62525]: DEBUG oslo_vmware.api [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1787.410296] env[62525]: value = "task-1782010" [ 1787.410296] env[62525]: _type = "Task" [ 1787.410296] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.419264] env[62525]: DEBUG oslo_vmware.api [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782010, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.485960] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0eea8b1e-02b7-4ebb-a967-a1422a29e087 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "4822fcae-9ffa-40fb-9870-2359cdd6b04d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.239s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.555295] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c51b06-ab50-2748-06b1-752adfff3e74, 'name': SearchDatastore_Task, 'duration_secs': 0.012318} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.556336] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.556632] env[62525]: DEBUG nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Instance network_info: |[{"id": "ae436399-b552-425e-a202-0742904e7a48", "address": "fa:16:3e:49:f0:29", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae436399-b5", "ovs_interfaceid": "ae436399-b552-425e-a202-0742904e7a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1787.556869] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abd69231-a5d0-4b05-9f1c-6da78b4df385 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.559585] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:49:f0:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afd3feb3-ffcc-4499-a2c2-eb6a48aefde9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae436399-b552-425e-a202-0742904e7a48', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1787.566691] env[62525]: DEBUG oslo.service.loopingcall [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1787.567253] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1787.567913] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c32c8a59-3030-4cec-be8f-8d7a0e42b902 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.583705] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1787.583705] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5292aad3-0489-df1b-a8fe-9d6fee135b05" [ 1787.583705] env[62525]: _type = "Task" [ 1787.583705] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.589053] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1787.589053] env[62525]: value = "task-1782011" [ 1787.589053] env[62525]: _type = "Task" [ 1787.589053] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.592797] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5292aad3-0489-df1b-a8fe-9d6fee135b05, 'name': SearchDatastore_Task, 'duration_secs': 0.015297} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.595760] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.596086] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5a40ca03-f61c-4232-80dc-7a745a34bc67/5a40ca03-f61c-4232-80dc-7a745a34bc67.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1787.596472] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40ead086-7533-4c4d-b9c3-55a511615287 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.603657] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782011, 'name': CreateVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.604959] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1787.604959] env[62525]: value = "task-1782012" [ 1787.604959] env[62525]: _type = "Task" [ 1787.604959] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.612754] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782012, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.679456] env[62525]: INFO nova.compute.manager [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Shelve offloading [ 1787.682597] env[62525]: DEBUG nova.compute.manager [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Received event network-vif-plugged-ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1787.682974] env[62525]: DEBUG oslo_concurrency.lockutils [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] Acquiring lock "160a67ea-5044-4597-9a61-82e05b8aa778-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.683311] env[62525]: DEBUG oslo_concurrency.lockutils [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] Lock "160a67ea-5044-4597-9a61-82e05b8aa778-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.683584] env[62525]: DEBUG oslo_concurrency.lockutils [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] Lock "160a67ea-5044-4597-9a61-82e05b8aa778-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.683855] env[62525]: DEBUG nova.compute.manager [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] No waiting events found dispatching network-vif-plugged-ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1787.684154] env[62525]: WARNING nova.compute.manager [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Received unexpected event network-vif-plugged-ae436399-b552-425e-a202-0742904e7a48 for instance with vm_state building and task_state spawning. [ 1787.684424] env[62525]: DEBUG nova.compute.manager [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Received event network-changed-ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1787.684700] env[62525]: DEBUG nova.compute.manager [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Refreshing instance network info cache due to event network-changed-ae436399-b552-425e-a202-0742904e7a48. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1787.684996] env[62525]: DEBUG oslo_concurrency.lockutils [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] Acquiring lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.685252] env[62525]: DEBUG oslo_concurrency.lockutils [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] Acquired lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.685528] env[62525]: DEBUG nova.network.neutron [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Refreshing network info cache for port ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1787.690412] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.690412] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a546707-cc38-4f4b-8636-6684a252aca4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.699097] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1787.699097] env[62525]: value = "task-1782013" [ 1787.699097] env[62525]: _type = "Task" [ 1787.699097] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.712863] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1787.713143] env[62525]: DEBUG nova.compute.manager [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1787.713975] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2f7b80-49b0-4eb8-8ffc-520ce384a108 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.722808] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.722808] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.722808] env[62525]: DEBUG nova.network.neutron [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1787.920871] env[62525]: DEBUG oslo_vmware.api [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782010, 'name': PowerOffVM_Task, 'duration_secs': 0.331759} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.921207] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1787.921398] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1787.921664] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7240ae9-f71c-4245-a990-bb85372032a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.041089] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1788.041089] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1788.041089] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Deleting the datastore file [datastore1] 6624506c-56ad-41f4-8d90-ed34ccfb9385 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1788.043389] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-079eed46-0d55-46ee-9d06-ce8b15f2b61e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.051049] env[62525]: DEBUG oslo_vmware.api [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1788.051049] env[62525]: value = "task-1782015" [ 1788.051049] env[62525]: _type = "Task" [ 1788.051049] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.060502] env[62525]: DEBUG oslo_vmware.api [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782015, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.101660] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782011, 'name': CreateVM_Task, 'duration_secs': 0.408205} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.104435] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1788.105506] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.105691] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.106482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1788.106482] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-347a5dfc-9f47-4af8-bed9-deef92ecad14 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.115021] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1788.115021] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e09f5e-9359-a3e3-2731-ba88ff7ab9d0" [ 1788.115021] env[62525]: _type = "Task" [ 1788.115021] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.118391] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782012, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.128985] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e09f5e-9359-a3e3-2731-ba88ff7ab9d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.173077] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056ffedb-6a1c-4607-ac5f-daea97033e75 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.181049] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338ad659-6bbb-4535-8aaf-9bfb82d5f4e9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.220303] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c5ed8f-63b6-4b68-9765-ea4aebd2db0b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.231084] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6987b2a5-9dc1-4f2a-83ca-eb8cdfd8a996 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.244886] env[62525]: DEBUG nova.compute.provider_tree [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1788.560298] env[62525]: DEBUG oslo_vmware.api [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782015, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.568885] env[62525]: DEBUG nova.network.neutron [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Updated VIF entry in instance network info cache for port ae436399-b552-425e-a202-0742904e7a48. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1788.569278] env[62525]: DEBUG nova.network.neutron [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Updating instance_info_cache with network_info: [{"id": "ae436399-b552-425e-a202-0742904e7a48", "address": "fa:16:3e:49:f0:29", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae436399-b5", "ovs_interfaceid": "ae436399-b552-425e-a202-0742904e7a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.590736] env[62525]: DEBUG nova.network.neutron [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb40cb3df-46", "ovs_interfaceid": "b40cb3df-4673-45d7-8b69-c642a8939d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.617707] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782012, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.628997] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e09f5e-9359-a3e3-2731-ba88ff7ab9d0, 'name': SearchDatastore_Task, 'duration_secs': 0.175597} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.629366] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.629633] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1788.630031] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.630213] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.630419] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1788.630707] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-670958b0-f102-4835-84b4-6ff5862f0879 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.648983] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1788.649388] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1788.650283] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84f1786a-d999-412e-852b-edc33f0b2e13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.655988] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1788.655988] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52944119-c302-cc53-79b7-4fc4302b667c" [ 1788.655988] env[62525]: _type = "Task" [ 1788.655988] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.664308] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52944119-c302-cc53-79b7-4fc4302b667c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.748012] env[62525]: DEBUG nova.scheduler.client.report [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1788.770892] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.771167] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.060815] env[62525]: DEBUG oslo_vmware.api [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.773945} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.061088] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1789.061278] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1789.061456] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1789.061626] env[62525]: INFO nova.compute.manager [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1789.061861] env[62525]: DEBUG oslo.service.loopingcall [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.062061] env[62525]: DEBUG nova.compute.manager [-] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1789.062156] env[62525]: DEBUG nova.network.neutron [-] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1789.072324] env[62525]: DEBUG oslo_concurrency.lockutils [req-b26f438c-65bc-4154-9652-d43ca675362c req-f413d496-e8f9-4c05-8221-bf44c6af7863 service nova] Releasing lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.093071] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.117049] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782012, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.107308} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.117287] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5a40ca03-f61c-4232-80dc-7a745a34bc67/5a40ca03-f61c-4232-80dc-7a745a34bc67.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1789.117496] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1789.117736] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76d8661c-0463-4034-9437-eea09ee852b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.123913] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1789.123913] env[62525]: value = "task-1782016" [ 1789.123913] env[62525]: _type = "Task" [ 1789.123913] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.132820] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782016, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.167719] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52944119-c302-cc53-79b7-4fc4302b667c, 'name': SearchDatastore_Task, 'duration_secs': 0.040975} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.168526] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c466ff26-bee4-493a-8621-c40b5d68ae54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.173524] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1789.173524] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52047ca8-fd5c-6470-a408-54783811e5f1" [ 1789.173524] env[62525]: _type = "Task" [ 1789.173524] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.182420] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52047ca8-fd5c-6470-a408-54783811e5f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.252808] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.253339] env[62525]: DEBUG nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1789.256125] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.404s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.257437] env[62525]: INFO nova.compute.claims [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1789.273398] env[62525]: DEBUG nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1789.572522] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1789.573563] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd33a51d-99a1-4a8d-9347-0fe4668f5346 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.582655] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1789.582978] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78da64d4-1b12-4b09-90da-a3d68779d495 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.633297] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782016, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058551} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.633643] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1789.634521] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8595f241-31a3-4649-8c01-4f5f82f54bce {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.657482] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 5a40ca03-f61c-4232-80dc-7a745a34bc67/5a40ca03-f61c-4232-80dc-7a745a34bc67.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.658843] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-868ac768-e0d6-4475-a06c-90a17ddcfac1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.673014] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Unregistered the VM {{(pid=62525) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1789.673232] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1789.673422] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleting the datastore file [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1789.673666] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffade4a0-85bf-4b87-b570-e0f0cefc8c0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.680498] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1789.680498] env[62525]: value = "task-1782018" [ 1789.680498] env[62525]: _type = "Task" [ 1789.680498] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.684241] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52047ca8-fd5c-6470-a408-54783811e5f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009827} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.688284] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.688590] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 160a67ea-5044-4597-9a61-82e05b8aa778/160a67ea-5044-4597-9a61-82e05b8aa778.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1789.688954] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1789.688954] env[62525]: value = "task-1782019" [ 1789.688954] env[62525]: _type = "Task" [ 1789.688954] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.689188] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd8a187e-5cb3-4592-81c2-d770a4bc3ebc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.697763] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782018, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.702752] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782019, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.703618] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1789.703618] env[62525]: value = "task-1782020" [ 1789.703618] env[62525]: _type = "Task" [ 1789.703618] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.710217] env[62525]: DEBUG nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Received event network-vif-deleted-bdc798dc-53dc-400d-aff6-c49ee2c1f4fb {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.710424] env[62525]: INFO nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Neutron deleted interface bdc798dc-53dc-400d-aff6-c49ee2c1f4fb; detaching it from the instance and deleting it from the info cache [ 1789.710490] env[62525]: DEBUG nova.network.neutron [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.714717] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782020, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.761436] env[62525]: DEBUG nova.compute.utils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1789.767032] env[62525]: DEBUG nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1789.767032] env[62525]: DEBUG nova.network.neutron [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1789.799349] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.807609] env[62525]: DEBUG nova.policy [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6f6e065dce947b2a31313b33a08132c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3900af0b29fa40beb95a4260054c8e5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1789.945840] env[62525]: DEBUG nova.network.neutron [-] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.198896] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.204897] env[62525]: DEBUG oslo_vmware.api [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136498} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.207713] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1790.208022] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1790.208300] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1790.216741] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782020, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.217031] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78d75520-bf24-4dc7-95ed-79bf740acd1f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.226562] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4270c952-b73f-4489-b76e-cdfeee91cc71 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.239440] env[62525]: INFO nova.scheduler.client.report [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted allocations for instance cb043ab8-dff7-48c6-b50b-a4d77a01eb41 [ 1790.261018] env[62525]: DEBUG nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Detach interface failed, port_id=bdc798dc-53dc-400d-aff6-c49ee2c1f4fb, reason: Instance 6624506c-56ad-41f4-8d90-ed34ccfb9385 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1790.261328] env[62525]: DEBUG nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-vif-unplugged-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1790.261592] env[62525]: DEBUG oslo_concurrency.lockutils [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.261861] env[62525]: DEBUG oslo_concurrency.lockutils [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.262105] env[62525]: DEBUG oslo_concurrency.lockutils [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.262484] env[62525]: DEBUG nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] No waiting events found dispatching network-vif-unplugged-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1790.262484] env[62525]: WARNING nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received unexpected event network-vif-unplugged-b40cb3df-4673-45d7-8b69-c642a8939d96 for instance with vm_state shelved and task_state shelving_offloading. [ 1790.262670] env[62525]: DEBUG nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1790.262851] env[62525]: DEBUG nova.compute.manager [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing instance network info cache due to event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1790.263071] env[62525]: DEBUG oslo_concurrency.lockutils [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] Acquiring lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.263245] env[62525]: DEBUG oslo_concurrency.lockutils [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] Acquired lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.263420] env[62525]: DEBUG nova.network.neutron [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1790.275540] env[62525]: DEBUG nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1790.292394] env[62525]: DEBUG nova.network.neutron [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Successfully created port: 931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1790.447913] env[62525]: INFO nova.compute.manager [-] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Took 1.39 seconds to deallocate network for instance. 
[ 1790.486281] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0495efd-7620-4010-9e23-e3641af8c2c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.494987] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88714e3b-3042-45d6-8441-ce6fbaf8bf42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.529865] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff90163-d71a-4417-8f14-9075658ca167 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.538364] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2451995c-e8a4-4f7f-bc54-462a12a5f90b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.552333] env[62525]: DEBUG nova.compute.provider_tree [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.695501] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782018, 'name': ReconfigVM_Task, 'duration_secs': 0.787071} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.695795] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 5a40ca03-f61c-4232-80dc-7a745a34bc67/5a40ca03-f61c-4232-80dc-7a745a34bc67.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1790.696462] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0578a905-47a6-4464-9eac-068ade0d364d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.702970] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1790.702970] env[62525]: value = "task-1782021" [ 1790.702970] env[62525]: _type = "Task" [ 1790.702970] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.716538] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782021, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.716761] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782020, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.911036} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.716981] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 160a67ea-5044-4597-9a61-82e05b8aa778/160a67ea-5044-4597-9a61-82e05b8aa778.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1790.717197] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1790.717423] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6aac2ac1-7b19-41c7-bf76-5c03802d49e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.723874] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1790.723874] env[62525]: value = "task-1782022" [ 1790.723874] env[62525]: _type = "Task" [ 1790.723874] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.732366] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782022, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.743602] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.959305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.979408] env[62525]: DEBUG nova.network.neutron [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updated VIF entry in instance network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1790.979849] env[62525]: DEBUG nova.network.neutron [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb40cb3df-46", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.055923] env[62525]: DEBUG nova.scheduler.client.report [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1791.213228] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab 
tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782021, 'name': Rename_Task, 'duration_secs': 0.145688} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.213524] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1791.213919] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-893eb54f-ce67-4810-95fd-a3cfec36a5d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.220456] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1791.220456] env[62525]: value = "task-1782023" [ 1791.220456] env[62525]: _type = "Task" [ 1791.220456] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.230501] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782023, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.235096] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066516} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.235287] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1791.236107] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5e7c0d-ed74-4e9a-ab0b-d3b86544f4cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.257623] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 160a67ea-5044-4597-9a61-82e05b8aa778/160a67ea-5044-4597-9a61-82e05b8aa778.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1791.257874] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b89eb8f-1ede-4582-a593-587ca5a707dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.279766] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1791.279766] env[62525]: value = "task-1782024" [ 1791.279766] env[62525]: _type = "Task" [ 1791.279766] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.287801] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782024, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.291036] env[62525]: DEBUG nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1791.318699] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1791.318991] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1791.319172] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.319370] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1791.319517] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.319661] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1791.319870] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1791.320072] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1791.320253] env[62525]: DEBUG nova.virt.hardware [None 
req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1791.320416] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1791.320588] env[62525]: DEBUG nova.virt.hardware [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1791.321498] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386ec83b-038c-427a-bbe0-fa72d0ed2f17 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.329708] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2743e8-13ff-4945-a3dd-ceba82c67aca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.483722] env[62525]: DEBUG oslo_concurrency.lockutils [req-85b70392-74a9-411a-bcba-dbd7da4438fa req-5a05f2c1-bd01-4a21-ad78-a6d4a0331093 service nova] Releasing lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.561367] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.561922] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1791.564578] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.671s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.566013] env[62525]: INFO nova.compute.claims [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1791.731283] env[62525]: DEBUG oslo_vmware.api [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782023, 'name': PowerOnVM_Task, 'duration_secs': 0.501666} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.731283] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1791.731643] env[62525]: INFO nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Took 8.77 seconds to spawn the instance on the hypervisor. 
[ 1791.731685] env[62525]: DEBUG nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1791.732515] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdd7145-e8de-463a-9135-3d1d2284ebde {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.744272] env[62525]: DEBUG nova.compute.manager [req-2fb1501c-a6fb-4777-9564-2aa6edcb292e req-d2b2bee4-3fb0-4f67-97ef-bcd95bf2a2a1 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Received event network-vif-plugged-931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1791.744272] env[62525]: DEBUG oslo_concurrency.lockutils [req-2fb1501c-a6fb-4777-9564-2aa6edcb292e req-d2b2bee4-3fb0-4f67-97ef-bcd95bf2a2a1 service nova] Acquiring lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.744272] env[62525]: DEBUG oslo_concurrency.lockutils [req-2fb1501c-a6fb-4777-9564-2aa6edcb292e req-d2b2bee4-3fb0-4f67-97ef-bcd95bf2a2a1 service nova] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.744759] env[62525]: DEBUG oslo_concurrency.lockutils [req-2fb1501c-a6fb-4777-9564-2aa6edcb292e req-d2b2bee4-3fb0-4f67-97ef-bcd95bf2a2a1 service nova] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.745111] env[62525]: DEBUG nova.compute.manager [req-2fb1501c-a6fb-4777-9564-2aa6edcb292e req-d2b2bee4-3fb0-4f67-97ef-bcd95bf2a2a1 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] No waiting events found dispatching network-vif-plugged-931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1791.745426] env[62525]: WARNING nova.compute.manager [req-2fb1501c-a6fb-4777-9564-2aa6edcb292e req-d2b2bee4-3fb0-4f67-97ef-bcd95bf2a2a1 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Received unexpected event network-vif-plugged-931ab189-c48d-469b-8776-5e4d3c8cf77a for instance with vm_state building and task_state spawning. [ 1791.790662] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782024, 'name': ReconfigVM_Task, 'duration_secs': 0.369919} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.792720] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 160a67ea-5044-4597-9a61-82e05b8aa778/160a67ea-5044-4597-9a61-82e05b8aa778.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1791.792720] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4049e1c0-1179-4514-8d39-ad2d06c5f570 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.801774] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1791.801774] env[62525]: value = "task-1782025" [ 1791.801774] env[62525]: _type = "Task" [ 1791.801774] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.810796] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782025, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.831664] env[62525]: DEBUG nova.network.neutron [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Successfully updated port: 931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1792.049567] env[62525]: DEBUG oslo_concurrency.lockutils [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.049991] env[62525]: DEBUG oslo_concurrency.lockutils [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.070529] env[62525]: DEBUG nova.compute.utils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1792.075299] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] 
[instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1792.075496] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1792.124627] env[62525]: DEBUG nova.policy [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5821387073324c7f8617c3b649208db5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '777edc988da64356a254ea680afc0783', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1792.254093] env[62525]: INFO nova.compute.manager [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Took 18.90 seconds to build instance. [ 1792.309989] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782025, 'name': Rename_Task, 'duration_secs': 0.225234} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.310350] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1792.310601] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d54705e4-5fe9-438d-b443-9879d4fa7d4e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.316876] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1792.316876] env[62525]: value = "task-1782026" [ 1792.316876] env[62525]: _type = "Task" [ 1792.316876] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.324634] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782026, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.334605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.334778] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.334960] env[62525]: DEBUG nova.network.neutron [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1792.364192] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Successfully created port: 8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1792.543424] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.552680] env[62525]: DEBUG nova.compute.utils [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1792.579116] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1792.752111] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c87c7b-a371-4e9d-ad83-a0ba2b007409 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.755852] env[62525]: DEBUG oslo_concurrency.lockutils [None req-63897f2e-5b4f-4b27-bb4f-1d09229c07ab tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.410s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.760224] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e343fff-ea6c-4ba4-8d99-30d801cbde33 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.791380] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4556c5a8-3eff-4426-878a-cbd4fbb6ed30 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.798854] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c06889-a6fe-4194-8dad-c5a2ca1a788d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.813503] env[62525]: DEBUG nova.compute.provider_tree [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.825171] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782026, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.865324] env[62525]: DEBUG nova.network.neutron [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1793.001884] env[62525]: DEBUG nova.network.neutron [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.055472] env[62525]: DEBUG oslo_concurrency.lockutils [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.318437] env[62525]: DEBUG nova.scheduler.client.report [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1793.330101] env[62525]: DEBUG oslo_vmware.api [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782026, 'name': PowerOnVM_Task, 'duration_secs': 0.959614} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.330385] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1793.330585] env[62525]: INFO nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Took 8.04 seconds to spawn the instance on the hypervisor. [ 1793.331104] env[62525]: DEBUG nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1793.332599] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333ef4fd-c925-4c47-928c-48f24f5da6b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.505056] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.505191] env[62525]: DEBUG nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Instance network_info: |[{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1793.505602] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 
e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:78:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '931ab189-c48d-469b-8776-5e4d3c8cf77a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1793.514162] env[62525]: DEBUG oslo.service.loopingcall [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1793.514397] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1793.514619] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17b1e897-a4ae-4623-a722-42eb15c60660 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.535172] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1793.535172] env[62525]: value = "task-1782027" [ 1793.535172] env[62525]: _type = "Task" [ 1793.535172] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.542886] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782027, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.588368] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1793.615553] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1793.615825] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1793.615987] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1793.616362] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1793.616567] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1793.616721] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1793.617477] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1793.617999] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1793.618378] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1793.618579] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1793.618756] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1793.619672] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209f9c4b-6d5e-459d-95a5-2a14703f57f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.632443] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f987f77-1589-4d35-83b0-d1b8dc1f629e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.814770] env[62525]: DEBUG nova.compute.manager [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Received event network-changed-931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1793.815035] env[62525]: DEBUG nova.compute.manager [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Refreshing instance network info cache due to event network-changed-931ab189-c48d-469b-8776-5e4d3c8cf77a. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1793.815213] env[62525]: DEBUG oslo_concurrency.lockutils [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] Acquiring lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.815369] env[62525]: DEBUG oslo_concurrency.lockutils [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] Acquired lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.815530] env[62525]: DEBUG nova.network.neutron [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Refreshing network info cache for port 931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1793.822072] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.822561] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1793.825417] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.907s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.826842] env[62525]: INFO nova.compute.claims [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1793.852595] env[62525]: INFO nova.compute.manager [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Took 19.40 seconds to build instance. [ 1793.923846] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Successfully updated port: 8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1794.045655] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782027, 'name': CreateVM_Task, 'duration_secs': 0.477086} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.045875] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1794.046673] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.046833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.047188] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1794.047442] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd8034b9-226d-4ade-9bce-cc80907b4147 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.051720] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1794.051720] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5231dc76-e5f9-64e3-598e-b3f4a97b760c" [ 1794.051720] env[62525]: _type = "Task" [ 1794.051720] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.059983] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5231dc76-e5f9-64e3-598e-b3f4a97b760c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.127526] env[62525]: DEBUG oslo_concurrency.lockutils [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.127758] env[62525]: DEBUG oslo_concurrency.lockutils [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.127982] env[62525]: INFO nova.compute.manager [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Attaching volume d818f996-4266-47c4-ab1e-9827cfc22a7d to /dev/sdb [ 1794.162016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e6596b-34eb-4d06-8d03-f2fbb2e2b319 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.169855] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11cba10-db98-4a40-9fde-4cf39a20905f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.183689] env[62525]: DEBUG nova.virt.block_device [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updating existing volume attachment record: 5ca9cd7d-e50d-4059-852e-3497ca337984 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1794.333340] env[62525]: DEBUG nova.compute.utils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1794.337174] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1794.337174] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1794.355036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-131af0a1-7533-4f9a-b87b-616d41626e06 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.908s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.419097] env[62525]: DEBUG nova.policy [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5821387073324c7f8617c3b649208db5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '777edc988da64356a254ea680afc0783', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1794.425731] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "refresh_cache-9fcec068-4921-4a42-b948-6e61a44658ce" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.425824] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "refresh_cache-9fcec068-4921-4a42-b948-6e61a44658ce" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.425957] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.561980] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5231dc76-e5f9-64e3-598e-b3f4a97b760c, 'name': SearchDatastore_Task, 'duration_secs': 0.009782} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.562423] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.562668] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1794.562927] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.563301] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.563540] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1794.563806] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9867ee28-f5f3-41f4-854d-36e968702c3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.572946] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1794.573308] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1794.574043] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d6deda1-5206-491a-8cab-44c310ee5ee1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.580645] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1794.580645] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c2b48d-200d-0c5b-c6fc-c68c1a7d7193" [ 1794.580645] env[62525]: _type = "Task" [ 1794.580645] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.588976] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c2b48d-200d-0c5b-c6fc-c68c1a7d7193, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.669270] env[62525]: DEBUG nova.network.neutron [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updated VIF entry in instance network info cache for port 931ab189-c48d-469b-8776-5e4d3c8cf77a. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1794.669270] env[62525]: DEBUG nova.network.neutron [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.785235] env[62525]: DEBUG nova.compute.manager [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Stashing vm_state: active {{(pid=62525) _prep_resize 
/opt/stack/nova/nova/compute/manager.py:5625}} [ 1794.794200] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Successfully created port: 3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1794.838894] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1794.986477] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1795.014016] env[62525]: DEBUG nova.compute.manager [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Received event network-changed-ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.015228] env[62525]: DEBUG nova.compute.manager [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Refreshing instance network info cache due to event network-changed-ae436399-b552-425e-a202-0742904e7a48. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1795.015626] env[62525]: DEBUG oslo_concurrency.lockutils [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] Acquiring lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.015626] env[62525]: DEBUG oslo_concurrency.lockutils [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] Acquired lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.015843] env[62525]: DEBUG nova.network.neutron [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Refreshing network info cache for port ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1795.093856] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c2b48d-200d-0c5b-c6fc-c68c1a7d7193, 'name': SearchDatastore_Task, 'duration_secs': 0.009914} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.096444] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bed8ea8-0237-4f49-90eb-fc1f532ed5fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.099198] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-389f0130-6c4b-41c5-9431-de415ce3e40e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.105134] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1795.105134] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52345771-e816-cf4f-8e16-616004cf7ef7" [ 1795.105134] env[62525]: _type = "Task" [ 1795.105134] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.113938] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795adb38-a3c9-48c6-9db2-d2bdff612212 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.122833] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52345771-e816-cf4f-8e16-616004cf7ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.146799] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.146943] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1795.147388] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea65415c-9f7f-4eee-9ba5-1c191cca7190 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.150519] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06ca9dd-fcb2-49af-8d8a-786df10fd3bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.159025] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e487be-b1d1-44c8-8e1b-0997e95f7300 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.163188] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1795.163188] env[62525]: value = "task-1782031" [ 1795.163188] env[62525]: _type = "Task" [ 1795.163188] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.179570] env[62525]: DEBUG oslo_concurrency.lockutils [req-b7c5fdb8-1688-430f-a2bc-e19b2a7f1d37 req-ae30dceb-71b9-4606-b744-f0b13c705b50 service nova] Releasing lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.180233] env[62525]: DEBUG nova.compute.provider_tree [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1795.186483] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782031, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.204579] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Updating instance_info_cache with network_info: [{"id": "8b36d9bf-8d2a-449b-9b2e-813c8c5a1593", "address": "fa:16:3e:2a:7c:c8", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b36d9bf-8d", "ovs_interfaceid": "8b36d9bf-8d2a-449b-9b2e-813c8c5a1593", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.305313] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.675383] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782031, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.682962] env[62525]: DEBUG nova.scheduler.client.report [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1795.709196] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "refresh_cache-9fcec068-4921-4a42-b948-6e61a44658ce" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.709547] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Instance network_info: |[{"id": "8b36d9bf-8d2a-449b-9b2e-813c8c5a1593", "address": "fa:16:3e:2a:7c:c8", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b36d9bf-8d", "ovs_interfaceid": "8b36d9bf-8d2a-449b-9b2e-813c8c5a1593", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1795.710353] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:7c:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b36d9bf-8d2a-449b-9b2e-813c8c5a1593', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1795.718111] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Creating folder: Project (777edc988da64356a254ea680afc0783). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1795.721179] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3193fda0-5f67-437c-9827-fb7840e454b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.732423] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Created folder: Project (777edc988da64356a254ea680afc0783) in parent group-v369553. [ 1795.732423] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Creating folder: Instances. Parent ref: group-v369834. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1795.732671] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fba30d7-f303-4b70-98be-d22012308e64 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.742473] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Created folder: Instances in parent group-v369834. [ 1795.742729] env[62525]: DEBUG oslo.service.loopingcall [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1795.742934] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1795.743174] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ceb5c88-5500-4147-a72e-d1209e2d03d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.763035] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1795.763035] env[62525]: value = "task-1782034" [ 1795.763035] env[62525]: _type = "Task" [ 1795.763035] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.764030] env[62525]: DEBUG nova.network.neutron [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Updated VIF entry in instance network info cache for port ae436399-b552-425e-a202-0742904e7a48. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1795.764389] env[62525]: DEBUG nova.network.neutron [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Updating instance_info_cache with network_info: [{"id": "ae436399-b552-425e-a202-0742904e7a48", "address": "fa:16:3e:49:f0:29", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae436399-b5", "ovs_interfaceid": "ae436399-b552-425e-a202-0742904e7a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.774549] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782034, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.845095] env[62525]: DEBUG nova.compute.manager [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Received event network-vif-plugged-8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.845405] env[62525]: DEBUG oslo_concurrency.lockutils [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] Acquiring lock "9fcec068-4921-4a42-b948-6e61a44658ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.845700] env[62525]: DEBUG oslo_concurrency.lockutils [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] Lock "9fcec068-4921-4a42-b948-6e61a44658ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.845965] env[62525]: DEBUG oslo_concurrency.lockutils [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] Lock "9fcec068-4921-4a42-b948-6e61a44658ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.846237] env[62525]: DEBUG nova.compute.manager [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] No waiting events found dispatching network-vif-plugged-8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.846538] env[62525]: WARNING nova.compute.manager [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Received unexpected event network-vif-plugged-8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 for instance with vm_state building and task_state spawning. [ 1795.846754] env[62525]: DEBUG nova.compute.manager [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Received event network-changed-8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.847056] env[62525]: DEBUG nova.compute.manager [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Refreshing instance network info cache due to event network-changed-8b36d9bf-8d2a-449b-9b2e-813c8c5a1593. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1795.847282] env[62525]: DEBUG oslo_concurrency.lockutils [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] Acquiring lock "refresh_cache-9fcec068-4921-4a42-b948-6e61a44658ce" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.847480] env[62525]: DEBUG oslo_concurrency.lockutils [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] Acquired lock "refresh_cache-9fcec068-4921-4a42-b948-6e61a44658ce" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.847718] env[62525]: DEBUG nova.network.neutron [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Refreshing network info cache for port 8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1795.850677] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1795.878406] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1795.878725] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1795.878927] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1795.879165] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1795.879326] 
env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1795.879495] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1795.879701] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1795.879880] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1795.880087] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1795.880273] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1795.880472] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1795.881372] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89abf1d8-ff74-4f08-89fa-6ec664a62fe1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.889767] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610ae4cc-e3d9-49b2-a0b0-c3bf626eb398 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.176750] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782031, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.916385} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.177168] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1796.177449] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1796.177750] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04759cf4-b5f4-47e8-918d-a247e188c186 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.184635] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1796.184635] env[62525]: value = "task-1782035" [ 1796.184635] env[62525]: _type = "Task" [ 1796.184635] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.190615] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.191126] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1796.193689] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.839s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.195102] env[62525]: INFO nova.compute.claims [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.202533] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782035, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.270216] env[62525]: DEBUG oslo_concurrency.lockutils [req-36f579ab-3908-40fb-8019-d626dfd4cfb1 req-fa5952f1-148a-460b-a65e-7c976ab0b620 service nova] Releasing lock "refresh_cache-160a67ea-5044-4597-9a61-82e05b8aa778" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.274809] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782034, 'name': CreateVM_Task, 'duration_secs': 0.377667} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.274959] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1796.275669] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.275824] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.276173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1796.276427] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78a5d661-dbb2-4b5a-9875-3eef54ba3c99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.281338] env[62525]: DEBUG 
oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1796.281338] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b15327-cf39-e1e6-6905-98dbd46e7c32" [ 1796.281338] env[62525]: _type = "Task" [ 1796.281338] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.289221] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b15327-cf39-e1e6-6905-98dbd46e7c32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.310935] env[62525]: DEBUG nova.compute.manager [req-48699c7f-fc9c-4aa3-90bd-5b93d97e1d5b req-7ba498ac-ed8d-4d8f-896b-19150279b751 service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Received event network-vif-plugged-3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1796.311172] env[62525]: DEBUG oslo_concurrency.lockutils [req-48699c7f-fc9c-4aa3-90bd-5b93d97e1d5b req-7ba498ac-ed8d-4d8f-896b-19150279b751 service nova] Acquiring lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.311385] env[62525]: DEBUG oslo_concurrency.lockutils [req-48699c7f-fc9c-4aa3-90bd-5b93d97e1d5b req-7ba498ac-ed8d-4d8f-896b-19150279b751 service nova] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.311634] env[62525]: DEBUG oslo_concurrency.lockutils [req-48699c7f-fc9c-4aa3-90bd-5b93d97e1d5b req-7ba498ac-ed8d-4d8f-896b-19150279b751 service nova] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.311716] env[62525]: DEBUG nova.compute.manager [req-48699c7f-fc9c-4aa3-90bd-5b93d97e1d5b req-7ba498ac-ed8d-4d8f-896b-19150279b751 service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] No waiting events found dispatching network-vif-plugged-3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1796.311873] env[62525]: WARNING nova.compute.manager [req-48699c7f-fc9c-4aa3-90bd-5b93d97e1d5b req-7ba498ac-ed8d-4d8f-896b-19150279b751 service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Received unexpected event network-vif-plugged-3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 for instance with vm_state building and task_state spawning. [ 1796.550278] env[62525]: DEBUG nova.network.neutron [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Updated VIF entry in instance network info cache for port 8b36d9bf-8d2a-449b-9b2e-813c8c5a1593. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1796.550692] env[62525]: DEBUG nova.network.neutron [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Updating instance_info_cache with network_info: [{"id": "8b36d9bf-8d2a-449b-9b2e-813c8c5a1593", "address": "fa:16:3e:2a:7c:c8", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b36d9bf-8d", "ovs_interfaceid": "8b36d9bf-8d2a-449b-9b2e-813c8c5a1593", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.694539] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782035, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069744} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.694810] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1796.695573] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd11f05f-d479-4c79-aa60-aad309a3914e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.701124] env[62525]: DEBUG nova.compute.utils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1796.711324] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1796.711527] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1796.721626] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1796.723039] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2ea8285-3281-4e26-b536-33ccb13e5688 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.743286] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1796.743286] env[62525]: value = "task-1782036" [ 1796.743286] env[62525]: _type = "Task" [ 1796.743286] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.751602] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782036, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.776817] env[62525]: DEBUG nova.policy [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5821387073324c7f8617c3b649208db5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '777edc988da64356a254ea680afc0783', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1796.791544] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b15327-cf39-e1e6-6905-98dbd46e7c32, 'name': SearchDatastore_Task, 'duration_secs': 0.009589} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.791905] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.792159] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1796.792387] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.792534] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.792714] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1796.793093] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3b50b15-4803-46bf-b98b-2f01de4cb4a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.801229] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1796.801413] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1796.802113] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a01df03-8c37-4f08-b4b8-f2372b389a21 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.807888] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1796.807888] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52042816-ef66-1bd7-d105-95f1f454dae3" [ 1796.807888] env[62525]: _type = "Task" [ 1796.807888] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.816155] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52042816-ef66-1bd7-d105-95f1f454dae3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.889421] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Successfully updated port: 3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1797.044620] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Successfully created port: 598582c7-f89c-4afd-acf4-81a44e028139 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1797.053493] env[62525]: DEBUG oslo_concurrency.lockutils [req-3200e532-bac9-47f1-aca3-707326e523c7 req-ee82503d-f823-41c3-be23-72d1be2f07c0 service nova] Releasing lock "refresh_cache-9fcec068-4921-4a42-b948-6e61a44658ce" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.223475] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1797.255997] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782036, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.320886] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52042816-ef66-1bd7-d105-95f1f454dae3, 'name': SearchDatastore_Task, 'duration_secs': 0.010985} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.321725] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a30108d2-deb0-4e48-acb6-8fc2a4d4ea3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.330059] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1797.330059] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5221d767-d352-69f7-13ca-4b4286cacf29" [ 1797.330059] env[62525]: _type = "Task" [ 1797.330059] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.338206] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5221d767-d352-69f7-13ca-4b4286cacf29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.392503] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "refresh_cache-c14f3fb8-3090-4df3-9e78-57ee9d62921f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.392667] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "refresh_cache-c14f3fb8-3090-4df3-9e78-57ee9d62921f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.392830] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1797.449170] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4fa2d8-c575-48da-a34c-087ac65b8510 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.458550] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb93bb96-bae9-4f0a-a887-a8c65c98fdb9 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.489427] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f54665e-2a83-4a83-bd72-4569004367cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.497220] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f163f0c-c3a2-48c9-aa16-bd329f3708c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.513924] env[62525]: DEBUG nova.compute.provider_tree [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1797.754451] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782036, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.841943] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5221d767-d352-69f7-13ca-4b4286cacf29, 'name': SearchDatastore_Task, 'duration_secs': 0.029227} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.842176] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.842438] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9fcec068-4921-4a42-b948-6e61a44658ce/9fcec068-4921-4a42-b948-6e61a44658ce.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1797.842685] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-243dbd9c-aa54-41b8-9c6d-3cdce9d043e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.849298] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1797.849298] env[62525]: value = "task-1782037" [ 1797.849298] env[62525]: _type = "Task" [ 1797.849298] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.856143] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782037, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.868360] env[62525]: DEBUG nova.compute.manager [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Received event network-changed-3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1797.868556] env[62525]: DEBUG nova.compute.manager [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Refreshing instance network info cache due to event network-changed-3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1797.868741] env[62525]: DEBUG oslo_concurrency.lockutils [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] Acquiring lock "refresh_cache-c14f3fb8-3090-4df3-9e78-57ee9d62921f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.928749] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1798.018837] env[62525]: DEBUG nova.scheduler.client.report [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1798.101766] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Updating instance_info_cache with network_info: [{"id": "3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6", "address": "fa:16:3e:ad:8b:8a", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c768ff1-29", "ovs_interfaceid": "3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.235334] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1798.254616] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782036, 'name': ReconfigVM_Task, 'duration_secs': 1.015607} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.254728] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfigured VM instance instance-00000066 to attach disk [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1798.257071] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae470ff7-3619-4593-9afc-ed905a86e9f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.263554] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1798.263554] env[62525]: value = "task-1782038" [ 1798.263554] env[62525]: _type = "Task" [ 1798.263554] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.265596] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1798.265783] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1798.265917] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1798.266130] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1798.266280] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 
tempest-ListServersNegativeTestJSON-1840569589-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1798.266519] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1798.266653] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1798.266773] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1798.266935] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1798.267111] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1798.267284] env[62525]: DEBUG nova.virt.hardware [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1798.268111] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1bbab1-0600-4859-a7e3-a1f703b65a82 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.281535] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5850bc8a-e979-4434-9d72-f4ab161ea160 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.286153] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782038, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.360034] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782037, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.524414] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.331s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.524974] env[62525]: DEBUG nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1798.533022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.730s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.533022] env[62525]: INFO nova.compute.claims [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1798.536807] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Successfully updated port: 598582c7-f89c-4afd-acf4-81a44e028139 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1798.604551] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "refresh_cache-c14f3fb8-3090-4df3-9e78-57ee9d62921f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.604899] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Instance network_info: |[{"id": "3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6", "address": "fa:16:3e:ad:8b:8a", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": 
"nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c768ff1-29", "ovs_interfaceid": "3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1798.605222] env[62525]: DEBUG oslo_concurrency.lockutils [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] Acquired lock "refresh_cache-c14f3fb8-3090-4df3-9e78-57ee9d62921f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.605408] env[62525]: DEBUG nova.network.neutron [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Refreshing network info cache for port 3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1798.606546] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:8b:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1798.614611] env[62525]: DEBUG oslo.service.loopingcall [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1798.617125] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1798.617587] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d40d8181-01ac-4f3e-91b2-de82bbf8cf72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.639027] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1798.639027] env[62525]: value = "task-1782039" [ 1798.639027] env[62525]: _type = "Task" [ 1798.639027] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.646644] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782039, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.779195] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782038, 'name': Rename_Task, 'duration_secs': 0.139542} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.779518] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1798.779832] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3cbc1db-7a0d-4898-8b0c-d4705c49f1e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.786790] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1798.786790] env[62525]: value = "task-1782041" [ 1798.786790] env[62525]: _type = "Task" [ 1798.786790] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.796247] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.846717] env[62525]: DEBUG nova.network.neutron [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Updated VIF entry in instance network info cache for port 3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1798.847240] env[62525]: DEBUG nova.network.neutron [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Updating instance_info_cache with network_info: [{"id": "3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6", "address": "fa:16:3e:ad:8b:8a", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c768ff1-29", "ovs_interfaceid": "3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.860230] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782037, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.037592] env[62525]: DEBUG nova.compute.utils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.042032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "refresh_cache-467c6af1-2961-4213-8f0c-fe7591d93b5d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.042032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "refresh_cache-467c6af1-2961-4213-8f0c-fe7591d93b5d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.042265] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1799.043613] env[62525]: DEBUG nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1799.043834] env[62525]: DEBUG nova.network.neutron [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1799.092941] env[62525]: DEBUG nova.policy [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1da85b41cee4803bb8d572bb37a84db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3fe67f961db46b9b3e2c37789829a2c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1799.148204] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782039, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.297317] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782041, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.349789] env[62525]: DEBUG oslo_concurrency.lockutils [req-b9eaa382-c7ab-4426-af18-edcf2aa234d6 req-0c6b645d-bbbc-4f9f-94de-6c6fba454cee service nova] Releasing lock "refresh_cache-c14f3fb8-3090-4df3-9e78-57ee9d62921f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.350985] env[62525]: DEBUG nova.network.neutron [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Successfully created port: 9fb5fc43-3369-489c-829b-506754512d51 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1799.362489] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782037, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.273646} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.362770] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 9fcec068-4921-4a42-b948-6e61a44658ce/9fcec068-4921-4a42-b948-6e61a44658ce.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1799.363006] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1799.363321] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e815474a-6a14-45cd-9337-f8c164965263 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.371135] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1799.371135] env[62525]: value = "task-1782042" [ 1799.371135] env[62525]: _type = "Task" [ 1799.371135] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.379211] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782042, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.550023] env[62525]: DEBUG nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1799.600021] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1799.654870] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782039, 'name': CreateVM_Task, 'duration_secs': 0.913774} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.655360] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1799.655693] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.655851] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.656177] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1799.656858] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-923f2068-1021-4647-97bd-a76c6b4cec64 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.661438] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1799.661438] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ad5f8f-5550-774c-8d06-502c64e20431" [ 1799.661438] env[62525]: _type = "Task" [ 1799.661438] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.669384] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ad5f8f-5550-774c-8d06-502c64e20431, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.757774] env[62525]: DEBUG nova.network.neutron [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Updating instance_info_cache with network_info: [{"id": "598582c7-f89c-4afd-acf4-81a44e028139", "address": "fa:16:3e:c2:9c:6f", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap598582c7-f8", "ovs_interfaceid": "598582c7-f89c-4afd-acf4-81a44e028139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1799.779923] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3adb43b-e8bc-4ed1-9e3c-9c420aaa4a4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.791251] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e90186a-e8b4-4702-b229-0348d840416d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.800786] env[62525]: DEBUG oslo_vmware.api [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782041, 'name': PowerOnVM_Task, 'duration_secs': 0.664569} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.825855] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1799.826098] env[62525]: INFO nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Took 8.53 seconds to spawn the instance on the hypervisor. [ 1799.826283] env[62525]: DEBUG nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1799.827540] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b410b1-0866-4a56-9625-ac0731072bba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.830445] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125e660b-eba5-4867-bbae-cee57af8339f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.842041] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a003fa-4356-427c-ad5c-4a8ac4558893 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.855532] env[62525]: DEBUG nova.compute.provider_tree [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.881218] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782042, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096717} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.881557] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1799.882291] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fac340-785e-440c-9c09-d8955679d3b8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.904431] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 9fcec068-4921-4a42-b948-6e61a44658ce/9fcec068-4921-4a42-b948-6e61a44658ce.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1799.905652] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-840d7649-6ff4-485d-a170-f2e48b7b43a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.920763] env[62525]: DEBUG nova.compute.manager [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Received event network-vif-plugged-598582c7-f89c-4afd-acf4-81a44e028139 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1799.920971] env[62525]: DEBUG oslo_concurrency.lockutils [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] Acquiring lock "467c6af1-2961-4213-8f0c-fe7591d93b5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.921191] env[62525]: DEBUG oslo_concurrency.lockutils [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.921359] env[62525]: DEBUG oslo_concurrency.lockutils [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.921527] env[62525]: DEBUG nova.compute.manager [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] No waiting events found dispatching network-vif-plugged-598582c7-f89c-4afd-acf4-81a44e028139 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1799.921687] env[62525]: WARNING 
nova.compute.manager [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Received unexpected event network-vif-plugged-598582c7-f89c-4afd-acf4-81a44e028139 for instance with vm_state building and task_state spawning. [ 1799.921845] env[62525]: DEBUG nova.compute.manager [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Received event network-changed-598582c7-f89c-4afd-acf4-81a44e028139 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1799.921995] env[62525]: DEBUG nova.compute.manager [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Refreshing instance network info cache due to event network-changed-598582c7-f89c-4afd-acf4-81a44e028139. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1799.922183] env[62525]: DEBUG oslo_concurrency.lockutils [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] Acquiring lock "refresh_cache-467c6af1-2961-4213-8f0c-fe7591d93b5d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.929119] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1799.929119] env[62525]: value = "task-1782043" [ 1799.929119] env[62525]: _type = "Task" [ 1799.929119] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.939675] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782043, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.173062] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ad5f8f-5550-774c-8d06-502c64e20431, 'name': SearchDatastore_Task, 'duration_secs': 0.010665} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.173062] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.173062] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1800.173533] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.173533] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.173604] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1800.173819] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-425bde3f-aa03-4b3a-836b-b682f450849d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.182162] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1800.182347] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1800.183108] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9cd6fa0-a798-465a-bb7a-655b0ebe29c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.188489] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1800.188489] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e4451d-741c-9285-3b20-96518ddec192" [ 1800.188489] env[62525]: _type = "Task" [ 1800.188489] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.195813] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e4451d-741c-9285-3b20-96518ddec192, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.260036] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "refresh_cache-467c6af1-2961-4213-8f0c-fe7591d93b5d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.260406] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Instance network_info: |[{"id": "598582c7-f89c-4afd-acf4-81a44e028139", "address": "fa:16:3e:c2:9c:6f", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap598582c7-f8", "ovs_interfaceid": "598582c7-f89c-4afd-acf4-81a44e028139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1800.260714] env[62525]: DEBUG oslo_concurrency.lockutils [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] Acquired lock "refresh_cache-467c6af1-2961-4213-8f0c-fe7591d93b5d" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.260893] env[62525]: DEBUG nova.network.neutron [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Refreshing network info cache for port 598582c7-f89c-4afd-acf4-81a44e028139 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.262359] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:9c:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '598582c7-f89c-4afd-acf4-81a44e028139', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1800.269767] env[62525]: DEBUG oslo.service.loopingcall [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.272496] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1800.272964] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4263e15-4d22-4656-b877-be22136bc812 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.293425] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1800.293425] env[62525]: value = "task-1782044" [ 1800.293425] env[62525]: _type = "Task" [ 1800.293425] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.302034] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782044, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.350203] env[62525]: INFO nova.compute.manager [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Took 16.17 seconds to build instance. 
[ 1800.358537] env[62525]: DEBUG nova.scheduler.client.report [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1800.440181] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782043, 'name': ReconfigVM_Task, 'duration_secs': 0.289212} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.440462] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 9fcec068-4921-4a42-b948-6e61a44658ce/9fcec068-4921-4a42-b948-6e61a44658ce.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1800.441087] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7358465-2ce5-40de-aeb7-c26e639b0e6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.448280] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1800.448280] env[62525]: value = "task-1782045" [ 1800.448280] env[62525]: _type = "Task" [ 1800.448280] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.456459] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782045, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.498329] env[62525]: DEBUG nova.network.neutron [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Updated VIF entry in instance network info cache for port 598582c7-f89c-4afd-acf4-81a44e028139. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1800.498816] env[62525]: DEBUG nova.network.neutron [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Updating instance_info_cache with network_info: [{"id": "598582c7-f89c-4afd-acf4-81a44e028139", "address": "fa:16:3e:c2:9c:6f", "network": {"id": "274b7fbe-b628-400f-985f-db68a7114b7e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-653452553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "777edc988da64356a254ea680afc0783", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap598582c7-f8", "ovs_interfaceid": "598582c7-f89c-4afd-acf4-81a44e028139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.562616] env[62525]: DEBUG nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1800.593608] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1800.593893] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1800.594103] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1800.594333] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1800.594520] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1800.594703] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1800.594948] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1800.595156] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1800.595359] env[62525]: DEBUG nova.virt.hardware [None 
req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1800.595557] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1800.595762] env[62525]: DEBUG nova.virt.hardware [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1800.596738] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485ec9c6-3d6c-4fa9-8b82-49101f54cbf9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.604568] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13aedf0-3448-406c-af99-8feab323352d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.701326] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e4451d-741c-9285-3b20-96518ddec192, 'name': SearchDatastore_Task, 'duration_secs': 0.010709} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.702543] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e01f2423-0f3b-4041-b7b7-6967f05bc6e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.708391] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1800.708391] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52da7af0-a8f2-3662-f648-09b2016ca911" [ 1800.708391] env[62525]: _type = "Task" [ 1800.708391] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.716311] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52da7af0-a8f2-3662-f648-09b2016ca911, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.806265] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782044, 'name': CreateVM_Task, 'duration_secs': 0.44639} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.806529] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1800.807135] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.807305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.807694] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1800.807918] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f95928ba-586d-460a-abda-37fab3578aef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.813089] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1800.813089] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52ae2405-b6c6-5ced-bb91-974fd4647d9a" [ 1800.813089] env[62525]: _type = "Task" [ 1800.813089] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.824668] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ae2405-b6c6-5ced-bb91-974fd4647d9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.852619] env[62525]: DEBUG oslo_concurrency.lockutils [None req-01ac2a0f-689d-4485-b9a2-56da1dcbe736 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.682s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.866552] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.867149] env[62525]: DEBUG nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1800.870058] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.127s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.870337] env[62525]: DEBUG nova.objects.instance [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'resources' on Instance uuid cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1800.948591] env[62525]: DEBUG nova.compute.manager [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Received event network-changed-931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1800.948591] env[62525]: DEBUG nova.compute.manager [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Refreshing instance network info cache due to event network-changed-931ab189-c48d-469b-8776-5e4d3c8cf77a. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1800.948591] env[62525]: DEBUG oslo_concurrency.lockutils [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] Acquiring lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.948591] env[62525]: DEBUG oslo_concurrency.lockutils [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] Acquired lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.948953] env[62525]: DEBUG nova.network.neutron [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Refreshing network info cache for port 931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.959366] env[62525]: DEBUG nova.network.neutron [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Successfully updated port: 9fb5fc43-3369-489c-829b-506754512d51 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1800.968533] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782045, 'name': Rename_Task, 'duration_secs': 0.207178} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.968834] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1800.969110] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f93077ac-cd00-4c08-b943-47e6c52502f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.977640] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1800.977640] env[62525]: value = "task-1782046" [ 1800.977640] env[62525]: _type = "Task" [ 1800.977640] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.993196] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782046, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.001891] env[62525]: DEBUG oslo_concurrency.lockutils [req-2bcc3b0c-e6b9-485e-b7a5-8cdd7d1542e9 req-93a72df5-426c-4278-a273-f2dc7b7fad7a service nova] Releasing lock "refresh_cache-467c6af1-2961-4213-8f0c-fe7591d93b5d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.218738] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52da7af0-a8f2-3662-f648-09b2016ca911, 'name': SearchDatastore_Task, 'duration_secs': 0.010036} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.219070] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.219328] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c14f3fb8-3090-4df3-9e78-57ee9d62921f/c14f3fb8-3090-4df3-9e78-57ee9d62921f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1801.219933] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-878e817a-6273-42cd-8866-79a8a83d86ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.227298] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1801.227298] env[62525]: value = "task-1782047" [ 1801.227298] env[62525]: _type = "Task" [ 1801.227298] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.233122] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1801.233363] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1801.234464] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e677ee73-0f42-4d77-ab38-f6ae4af7eecc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.239943] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.253628] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53d0c80-5e04-4a82-886c-59f751eedd18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.280296] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] volume-d818f996-4266-47c4-ab1e-9827cfc22a7d/volume-d818f996-4266-47c4-ab1e-9827cfc22a7d.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1801.280656] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fce8fdb2-a01b-4f50-831b-751746c59b59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.299305] env[62525]: DEBUG oslo_vmware.api [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1801.299305] env[62525]: value = "task-1782048" [ 1801.299305] env[62525]: _type = "Task" [ 1801.299305] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.309027] env[62525]: DEBUG oslo_vmware.api [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782048, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.323243] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52ae2405-b6c6-5ced-bb91-974fd4647d9a, 'name': SearchDatastore_Task, 'duration_secs': 0.009455} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.323524] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.323768] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1801.324033] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.324188] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.324369] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1801.324654] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f21cc9b-5cad-4815-a8a0-c2c78fe43b04 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.334529] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1801.334699] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1801.335486] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86d15888-96e8-4087-a0be-cb07b6e9f294 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.341051] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1801.341051] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5250e563-c050-08dc-34dc-f2acf6826589" [ 1801.341051] env[62525]: _type = "Task" [ 1801.341051] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.350117] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5250e563-c050-08dc-34dc-f2acf6826589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.373675] env[62525]: DEBUG nova.compute.utils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1801.375236] env[62525]: DEBUG nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1801.375348] env[62525]: DEBUG nova.network.neutron [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1801.377480] env[62525]: DEBUG nova.objects.instance [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'numa_topology' on Instance uuid cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.429274] env[62525]: DEBUG nova.policy [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59a07073c7534d17bfb2013552bbe0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c56f465d1a641a99458904c04137621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1801.462740] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.462926] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.463143] env[62525]: DEBUG nova.network.neutron [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1801.489528] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782046, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.741930] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782047, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.809684] env[62525]: DEBUG oslo_vmware.api [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782048, 'name': ReconfigVM_Task, 'duration_secs': 0.495975} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.810010] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfigured VM instance instance-00000061 to attach disk [datastore1] volume-d818f996-4266-47c4-ab1e-9827cfc22a7d/volume-d818f996-4266-47c4-ab1e-9827cfc22a7d.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1801.814738] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6beb6675-3fbb-493b-94ac-eab8fea87350 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.830363] env[62525]: DEBUG oslo_vmware.api [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1801.830363] env[62525]: value = "task-1782049" [ 1801.830363] env[62525]: _type = "Task" [ 1801.830363] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.837415] env[62525]: DEBUG oslo_vmware.api [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782049, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.849759] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5250e563-c050-08dc-34dc-f2acf6826589, 'name': SearchDatastore_Task, 'duration_secs': 0.008938} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.850543] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c73d7695-6e4a-420a-9601-aa26cd0053d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.853606] env[62525]: DEBUG nova.network.neutron [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updated VIF entry in instance network info cache for port 931ab189-c48d-469b-8776-5e4d3c8cf77a. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1801.853946] env[62525]: DEBUG nova.network.neutron [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.858152] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1801.858152] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52976129-ddcd-9a29-211c-cc40c0236deb" [ 1801.858152] env[62525]: _type = "Task" [ 1801.858152] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.866407] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52976129-ddcd-9a29-211c-cc40c0236deb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.879702] env[62525]: DEBUG nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1801.882236] env[62525]: DEBUG nova.objects.base [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1801.906669] env[62525]: DEBUG nova.network.neutron [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Successfully created port: 84be5220-9eed-41b9-8882-cb2acc60aa3d {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1801.970345] env[62525]: DEBUG nova.compute.manager [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Received event network-vif-plugged-9fb5fc43-3369-489c-829b-506754512d51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1801.971008] env[62525]: DEBUG oslo_concurrency.lockutils [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] Acquiring lock "69a1093a-95d7-4cbb-90bf-1a213470872a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.971237] env[62525]: DEBUG oslo_concurrency.lockutils [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.971419] env[62525]: DEBUG oslo_concurrency.lockutils [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.971682] env[62525]: DEBUG nova.compute.manager [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] No waiting events found dispatching network-vif-plugged-9fb5fc43-3369-489c-829b-506754512d51 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1801.971875] env[62525]: WARNING nova.compute.manager [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Received unexpected event network-vif-plugged-9fb5fc43-3369-489c-829b-506754512d51 for instance with vm_state building and task_state spawning. 
[ 1801.972123] env[62525]: DEBUG nova.compute.manager [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Received event network-changed-9fb5fc43-3369-489c-829b-506754512d51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1801.972288] env[62525]: DEBUG nova.compute.manager [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Refreshing instance network info cache due to event network-changed-9fb5fc43-3369-489c-829b-506754512d51. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1801.972465] env[62525]: DEBUG oslo_concurrency.lockutils [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] Acquiring lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.990334] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782046, 'name': PowerOnVM_Task, 'duration_secs': 0.586725} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.993069] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1801.993295] env[62525]: INFO nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Took 8.40 seconds to spawn the instance on the hypervisor. [ 1801.993477] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1801.994718] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b41a9b-5643-451f-8643-59cac5875fcb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.029530] env[62525]: DEBUG nova.network.neutron [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1802.123961] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd67aac-9c1c-46a6-b01f-177961f87918 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.131937] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fc8077-0a19-4e04-ab35-bf64a874dba7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.163341] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196a32d6-e902-4202-a843-9b7d1bad90e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.170894] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ea8fc2-ccca-478e-95de-665259fc5324 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.186698] env[62525]: DEBUG nova.compute.provider_tree [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.238386] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782047, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584417} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.239391] env[62525]: DEBUG nova.network.neutron [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Updating instance_info_cache with network_info: [{"id": "9fb5fc43-3369-489c-829b-506754512d51", "address": "fa:16:3e:98:ad:28", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fb5fc43-33", "ovs_interfaceid": "9fb5fc43-3369-489c-829b-506754512d51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.240605] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c14f3fb8-3090-4df3-9e78-57ee9d62921f/c14f3fb8-3090-4df3-9e78-57ee9d62921f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1802.240826] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1802.241610] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39b29180-6933-4847-af03-970e8508e278 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.248241] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1802.248241] env[62525]: value = "task-1782050" [ 1802.248241] env[62525]: _type = "Task" [ 1802.248241] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.258776] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782050, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.341059] env[62525]: DEBUG oslo_vmware.api [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782049, 'name': ReconfigVM_Task, 'duration_secs': 0.216542} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.341365] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1802.356378] env[62525]: DEBUG oslo_concurrency.lockutils [req-741e8cad-0d7b-4f8f-b56d-25eb6e7548b8 req-1ea120dc-d64d-4d13-826d-58c73b6c4fd0 service nova] Releasing lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.367691] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52976129-ddcd-9a29-211c-cc40c0236deb, 'name': SearchDatastore_Task, 'duration_secs': 0.009674} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.367932] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.368203] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 467c6af1-2961-4213-8f0c-fe7591d93b5d/467c6af1-2961-4213-8f0c-fe7591d93b5d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1802.368450] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97c2a02f-71e5-4b8d-855f-5f6c6ac76fa5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.375086] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1802.375086] env[62525]: value = "task-1782051" [ 1802.375086] env[62525]: _type = "Task" [ 1802.375086] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.382832] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782051, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.512854] env[62525]: INFO nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Took 17.68 seconds to build instance. 
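The "Waiting for the task: (returnval){ value = "task-…" }" blocks and the "Task: {'id': …} progress is N% / completed successfully" lines are emitted by oslo.vmware's task-polling loop around vSphere `*_Task` methods. A hedged sketch of what a caller does; names, credentials, and values are placeholders, not taken from this log, and the constructor keywords are quoted from memory:

```python
# Illustrative only: drive a vSphere task through oslo.vmware and block on it.
from oslo_vmware import api as vmware_api

def power_on(session, vm_ref):
    """Invoke PowerOnVM_Task and wait; wait_for_task() polls the task and
    produces the 'progress is N%' / 'completed successfully' DEBUG lines."""
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)

# A session like the one the driver creates at start-up (placeholder values):
# session = vmware_api.VMwareAPISession('vc.example.org', 'admin', 'secret',
#                                       api_retry_count=10,
#                                       task_poll_interval=0.5)
```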
[ 1802.690199] env[62525]: DEBUG nova.scheduler.client.report [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1802.743051] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.743410] env[62525]: DEBUG nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Instance network_info: |[{"id": "9fb5fc43-3369-489c-829b-506754512d51", "address": "fa:16:3e:98:ad:28", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fb5fc43-33", "ovs_interfaceid": "9fb5fc43-3369-489c-829b-506754512d51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1802.743776] env[62525]: DEBUG oslo_concurrency.lockutils [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] Acquired lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.743965] env[62525]: DEBUG nova.network.neutron [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Refreshing network info cache for port 9fb5fc43-3369-489c-829b-506754512d51 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1802.745300] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 
69a1093a-95d7-4cbb-90bf-1a213470872a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:ad:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10ff2092-e8eb-4768-ad4a-65a80560b447', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fb5fc43-3369-489c-829b-506754512d51', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1802.753328] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Creating folder: Project (d3fe67f961db46b9b3e2c37789829a2c). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1802.754563] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6292a37c-3886-4eb8-b1e1-715dad4e970a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.765575] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782050, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066051} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.765844] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1802.766643] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9840051c-9570-4736-9de3-d75d17a04166 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.770668] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Created folder: Project (d3fe67f961db46b9b3e2c37789829a2c) in parent group-v369553. [ 1802.770875] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Creating folder: Instances. Parent ref: group-v369839. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1802.771813] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be2d2167-2e02-4d5b-aca6-3ec3d1b66224 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.792640] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] c14f3fb8-3090-4df3-9e78-57ee9d62921f/c14f3fb8-3090-4df3-9e78-57ee9d62921f.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1802.793479] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81d09b90-6b80-4756-a454-14b79f2a91de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.809946] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Created folder: Instances in parent group-v369839. [ 1802.810227] env[62525]: DEBUG oslo.service.loopingcall [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.810855] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1802.811102] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-666271c2-c0aa-4ef0-a490-61ec83c8da47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.826816] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1802.826816] env[62525]: value = "task-1782054" [ 1802.826816] env[62525]: _type = "Task" [ 1802.826816] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.831716] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1802.831716] env[62525]: value = "task-1782055" [ 1802.831716] env[62525]: _type = "Task" [ 1802.831716] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.838734] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782054, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.846289] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782055, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.885510] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782051, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.891185] env[62525]: DEBUG nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1802.919550] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1802.919832] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1802.920019] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1802.920246] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1802.920456] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1802.920618] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1802.920841] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 
tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1802.921014] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1802.921210] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1802.921393] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1802.921570] env[62525]: DEBUG nova.virt.hardware [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1802.922498] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b587aea2-9e02-4dc6-8447-56b2df55f00d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.934202] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcb3eb9-682d-41f6-a809-ffa1a75f1a4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.015047] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "9fcec068-4921-4a42-b948-6e61a44658ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.194s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.196716] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.325s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.198418] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.239s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.198627] env[62525]: DEBUG oslo_concurrency.lockutils 
[None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.200768] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.896s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.225722] env[62525]: INFO nova.scheduler.client.report [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Deleted allocations for instance 6624506c-56ad-41f4-8d90-ed34ccfb9385 [ 1803.340031] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782054, 'name': ReconfigVM_Task, 'duration_secs': 0.364798} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.341772] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Reconfigured VM instance instance-00000068 to attach disk [datastore1] c14f3fb8-3090-4df3-9e78-57ee9d62921f/c14f3fb8-3090-4df3-9e78-57ee9d62921f.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1803.346729] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35c14fab-b005-4ad3-bbef-07f8be67d7f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.348664] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782055, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.354463] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1803.354463] env[62525]: value = "task-1782056" [ 1803.354463] env[62525]: _type = "Task" [ 1803.354463] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.365526] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782056, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.388121] env[62525]: DEBUG nova.objects.instance [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'flavor' on Instance uuid 50ee564d-7b27-4bc4-a95e-7717de865cfb {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1803.389613] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782051, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578763} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.395404] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 467c6af1-2961-4213-8f0c-fe7591d93b5d/467c6af1-2961-4213-8f0c-fe7591d93b5d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1803.395404] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1803.397017] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-298d9095-125e-4e8f-8010-11cdf78337b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.406039] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1803.406039] env[62525]: value = "task-1782057" [ 1803.406039] env[62525]: _type = "Task" [ 1803.406039] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.419919] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782057, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.677380] env[62525]: DEBUG nova.compute.manager [req-cf7968f7-990f-4bf4-afd0-ce584019076b req-420fb958-0e3c-444d-960b-d0318fbc2477 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Received event network-vif-plugged-84be5220-9eed-41b9-8882-cb2acc60aa3d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1803.677611] env[62525]: DEBUG oslo_concurrency.lockutils [req-cf7968f7-990f-4bf4-afd0-ce584019076b req-420fb958-0e3c-444d-960b-d0318fbc2477 service nova] Acquiring lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.677823] env[62525]: DEBUG oslo_concurrency.lockutils [req-cf7968f7-990f-4bf4-afd0-ce584019076b req-420fb958-0e3c-444d-960b-d0318fbc2477 service nova] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.678009] env[62525]: DEBUG oslo_concurrency.lockutils [req-cf7968f7-990f-4bf4-afd0-ce584019076b req-420fb958-0e3c-444d-960b-d0318fbc2477 service nova] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.678176] env[62525]: DEBUG nova.compute.manager [req-cf7968f7-990f-4bf4-afd0-ce584019076b req-420fb958-0e3c-444d-960b-d0318fbc2477 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] No waiting events found dispatching network-vif-plugged-84be5220-9eed-41b9-8882-cb2acc60aa3d {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1803.678344] env[62525]: WARNING nova.compute.manager [req-cf7968f7-990f-4bf4-afd0-ce584019076b req-420fb958-0e3c-444d-960b-d0318fbc2477 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Received unexpected event network-vif-plugged-84be5220-9eed-41b9-8882-cb2acc60aa3d for instance with vm_state building and task_state spawning. 
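The `network_info` payloads logged above by `update_instance_cache_with_nw_info` are plain JSON lists of VIF dictionaries. A small, hedged log-reading helper (not part of Nova) for pulling addresses out of such a blob, assuming the bracketed payload is copied verbatim from an "Updating instance_info_cache with network_info: [...]" entry:

```python
# Hedged helper for reading this log, using only the structure shown above.
import json

def vif_addresses(network_info_json: str):
    """Return {port_id: (fixed IPs, floating IPs)} from a network_info blob."""
    result = {}
    for vif in json.loads(network_info_json):
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        result[vif["id"]] = (fixed, floating)
    return result

# e.g. the first cache update above maps port 931ab189-c48d-469b-8776-5e4d3c8cf77a
# to fixed IP 192.168.128.11 with floating IP 10.180.180.148.
```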
[ 1803.709930] env[62525]: INFO nova.compute.claims [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1803.715688] env[62525]: DEBUG oslo_concurrency.lockutils [None req-017fe5ed-6ced-4c3f-8a61-bdbb9b548b76 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 31.023s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.717277] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 11.174s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.717489] env[62525]: INFO nova.compute.manager [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Unshelving [ 1803.732980] env[62525]: DEBUG oslo_concurrency.lockutils [None req-025b80e4-fccb-4c51-b65a-74dd15b378dd tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "6624506c-56ad-41f4-8d90-ed34ccfb9385" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.343s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.762722] env[62525]: DEBUG nova.network.neutron [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Successfully updated port: 84be5220-9eed-41b9-8882-cb2acc60aa3d {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1803.818035] env[62525]: DEBUG nova.network.neutron [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Updated VIF entry in instance network info cache for port 9fb5fc43-3369-489c-829b-506754512d51. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1803.818402] env[62525]: DEBUG nova.network.neutron [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Updating instance_info_cache with network_info: [{"id": "9fb5fc43-3369-489c-829b-506754512d51", "address": "fa:16:3e:98:ad:28", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fb5fc43-33", "ovs_interfaceid": "9fb5fc43-3369-489c-829b-506754512d51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.843397] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782055, 'name': CreateVM_Task, 'duration_secs': 0.628937} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.843566] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1803.844240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.844407] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.844731] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1803.844984] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96b3423b-eda5-4f15-a4e6-b4257d7ac075 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.850055] 
env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1803.850055] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5252ab45-5d61-a3d0-e355-4014c90a860d" [ 1803.850055] env[62525]: _type = "Task" [ 1803.850055] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.860559] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5252ab45-5d61-a3d0-e355-4014c90a860d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.865953] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782056, 'name': Rename_Task, 'duration_secs': 0.176579} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.866342] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1803.866696] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c9efc77-3867-4a02-b26a-ec5ffbc61835 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.873883] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1803.873883] env[62525]: value = "task-1782058" [ 1803.873883] env[62525]: _type = "Task" [ 1803.873883] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.881876] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782058, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.899820] env[62525]: DEBUG oslo_concurrency.lockutils [None req-505ae2b3-5e1e-4389-a4f9-da2792336775 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.772s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.918951] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782057, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072927} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.918951] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1803.919738] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d491f86-8e3c-4e28-99f7-e59adc560513 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.945785] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 467c6af1-2961-4213-8f0c-fe7591d93b5d/467c6af1-2961-4213-8f0c-fe7591d93b5d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1803.946776] env[62525]: INFO nova.compute.manager [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Rebuilding instance [ 1803.948894] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b48a8b7-5687-4cc7-ad11-e7fbd4f41e60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.973937] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1803.973937] env[62525]: value = "task-1782059" [ 1803.973937] env[62525]: _type = "Task" [ 1803.973937] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.983037] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782059, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.007173] env[62525]: DEBUG nova.compute.manager [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Received event network-changed-84be5220-9eed-41b9-8882-cb2acc60aa3d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1804.007460] env[62525]: DEBUG nova.compute.manager [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Refreshing instance network info cache due to event network-changed-84be5220-9eed-41b9-8882-cb2acc60aa3d. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1804.007755] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] Acquiring lock "refresh_cache-61fa8887-db88-4adc-8c3f-ffc78e0e550d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.007954] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] Acquired lock "refresh_cache-61fa8887-db88-4adc-8c3f-ffc78e0e550d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.008288] env[62525]: DEBUG nova.network.neutron [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Refreshing network info cache for port 84be5220-9eed-41b9-8882-cb2acc60aa3d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1804.019203] env[62525]: DEBUG nova.compute.manager [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1804.020054] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d67724-e538-4184-b85a-caa33cf260b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.219182] env[62525]: INFO nova.compute.resource_tracker [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating resource usage from migration 3b8de5bf-1290-42c3-8fa6-f9777a7f3941 [ 1804.267806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "refresh_cache-61fa8887-db88-4adc-8c3f-ffc78e0e550d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.320588] env[62525]: DEBUG oslo_concurrency.lockutils [req-dd87e15e-3349-4761-ae02-da888ee634a0 req-ec300193-34ea-4da9-a015-811e41a8f28f service nova] Releasing lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.363564] env[62525]: DEBUG 
oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5252ab45-5d61-a3d0-e355-4014c90a860d, 'name': SearchDatastore_Task, 'duration_secs': 0.010883} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.367023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.367023] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1804.367023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.367023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.367023] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1804.367497] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-728a7122-6db3-41dc-a197-cb1daba295df {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.381931] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1804.382144] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1804.383358] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58247454-7289-4501-8cf2-8f357cc62117 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.388602] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782058, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.392351] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1804.392351] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fa0d3b-75b0-745b-5045-704bdeee91f7" [ 1804.392351] env[62525]: _type = "Task" [ 1804.392351] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.404233] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fa0d3b-75b0-745b-5045-704bdeee91f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.420249] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad83224-4fc4-45d1-8744-c83447dda5cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.429692] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37167265-393b-4aff-b760-127175c11922 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.461019] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2222ca4f-df71-4860-8e73-083c6bc5232b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.469013] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192180c8-8356-4bc1-baca-082cf34d886f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.484971] env[62525]: DEBUG nova.compute.provider_tree [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1804.491358] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782059, 'name': ReconfigVM_Task, 'duration_secs': 0.388553} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.491638] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 467c6af1-2961-4213-8f0c-fe7591d93b5d/467c6af1-2961-4213-8f0c-fe7591d93b5d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1804.492438] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87f2d9f2-324b-46b0-818c-0507b2c8311a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.500415] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1804.500415] env[62525]: value = "task-1782060" [ 1804.500415] env[62525]: _type = "Task" [ 1804.500415] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.508551] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782060, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.519638] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.519864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.520084] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.520534] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.520534] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.522631] env[62525]: INFO nova.compute.manager [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Terminating instance [ 1804.525060] env[62525]: DEBUG nova.compute.manager [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1804.525309] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1804.526199] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fba1e8f-5583-428c-831b-85f23603afc4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.536597] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1804.536885] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1804.537446] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58aa0a3e-eba4-40ab-9685-ad2aac13663a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.538977] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-049ff05b-1a66-4ccb-95f3-0e229d27531c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.545759] env[62525]: DEBUG oslo_vmware.api [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1804.545759] env[62525]: value = "task-1782062" [ 1804.545759] env[62525]: _type = "Task" [ 1804.545759] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.547335] env[62525]: DEBUG nova.network.neutron [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1804.549136] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1804.549136] env[62525]: value = "task-1782061" [ 1804.549136] env[62525]: _type = "Task" [ 1804.549136] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.561355] env[62525]: DEBUG oslo_vmware.api [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782062, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.564914] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.636125] env[62525]: DEBUG nova.network.neutron [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.729506] env[62525]: DEBUG nova.compute.utils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.884529] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782058, 'name': PowerOnVM_Task, 'duration_secs': 0.57574} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.884805] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1804.885012] env[62525]: INFO nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Took 9.03 seconds to spawn the instance on the hypervisor. 
[ 1804.885193] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1804.885938] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b08d6b-a33e-4886-a138-a1938610b890 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.903674] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fa0d3b-75b0-745b-5045-704bdeee91f7, 'name': SearchDatastore_Task, 'duration_secs': 0.012015} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.904399] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a3fd447-7606-4324-80df-b83381254d1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.909057] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1804.909057] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52eb64e4-970d-a443-5a16-ad2d096f6ef2" [ 1804.909057] env[62525]: _type = "Task" [ 1804.909057] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.916318] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52eb64e4-970d-a443-5a16-ad2d096f6ef2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.988136] env[62525]: DEBUG nova.scheduler.client.report [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1805.010403] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782060, 'name': Rename_Task, 'duration_secs': 0.241608} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.010670] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1805.010906] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b19b9e5-f2eb-495c-81bd-9050652c125c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.016599] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1805.016599] env[62525]: value = "task-1782063" [ 1805.016599] env[62525]: _type = "Task" [ 1805.016599] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.024172] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782063, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.061648] env[62525]: DEBUG oslo_vmware.api [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782062, 'name': PowerOffVM_Task, 'duration_secs': 0.272376} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.065991] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1805.065991] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1805.065991] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782061, 'name': PowerOffVM_Task, 'duration_secs': 0.303968} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.066174] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b0010cb-1a82-438d-b184-d84f8c277447 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.067469] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1805.122337] env[62525]: INFO nova.compute.manager [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Detaching volume d818f996-4266-47c4-ab1e-9827cfc22a7d [ 1805.138796] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e3852fc-4111-4722-b320-099bbd88559f req-e4a3f4a4-77cd-484b-923c-21493253b036 service nova] Releasing lock "refresh_cache-61fa8887-db88-4adc-8c3f-ffc78e0e550d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.139566] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "refresh_cache-61fa8887-db88-4adc-8c3f-ffc78e0e550d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.139722] env[62525]: DEBUG nova.network.neutron [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1805.156362] env[62525]: INFO nova.virt.block_device [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Attempting to driver detach volume d818f996-4266-47c4-ab1e-9827cfc22a7d from mountpoint /dev/sdb [ 1805.156596] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1805.156775] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1805.157658] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70692219-0769-426e-be9e-58fda5b775e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.180964] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf7dd34-c3fb-4216-93f6-71ca6e9cb247 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.187869] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d5d013-c210-4ffd-b098-006c0309b3b5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.207797] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426991c7-54c5-4122-bf22-6eb48cfdd8ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.222559] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] The volume has not been displaced from its original location: [datastore1] volume-d818f996-4266-47c4-ab1e-9827cfc22a7d/volume-d818f996-4266-47c4-ab1e-9827cfc22a7d.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1805.227774] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1805.228063] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-934f303b-df5d-4988-8230-90a8ada8439e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.242092] env[62525]: INFO nova.virt.block_device [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Booting with volume aa014b16-de19-45f8-9702-f93bf9cafd8f at /dev/sdb [ 1805.249495] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1805.249495] env[62525]: value = "task-1782065" [ 1805.249495] env[62525]: _type = "Task" [ 1805.249495] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.259418] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782065, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.266309] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1805.266501] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1805.266703] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Deleting the datastore file [datastore1] 462bc19d-1eaa-4c57-8ebb-412a97614f03 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1805.266967] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09f39528-589b-432d-b3cd-427757f04b09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.274235] env[62525]: DEBUG oslo_vmware.api [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for the task: (returnval){ [ 1805.274235] env[62525]: value = "task-1782066" [ 1805.274235] env[62525]: _type = "Task" [ 1805.274235] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.275299] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-025f1bca-b1a0-4ded-8519-6404a381e813 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.285727] env[62525]: DEBUG oslo_vmware.api [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782066, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.289753] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c428982-1f03-4673-bc40-5107c4a116d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.319417] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6040c532-31e4-4a1c-95b1-9e7e66976a01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.328400] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f97963-0e58-4bd3-9fc5-c2f7f5cb9329 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.360511] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d558e743-9dd7-4d24-80dc-6889c5675073 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.367472] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5345c4ac-7807-40ac-a7d7-2908a13656c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.380950] env[62525]: DEBUG nova.virt.block_device [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating existing volume attachment record: 5085591e-5db5-4d50-9c50-8eac523076d1 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1805.401474] env[62525]: INFO nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Took 20.52 seconds to build instance. [ 1805.421877] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52eb64e4-970d-a443-5a16-ad2d096f6ef2, 'name': SearchDatastore_Task, 'duration_secs': 0.016056} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.422173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.422441] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/69a1093a-95d7-4cbb-90bf-1a213470872a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1805.422713] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7417a92-2392-4dcb-8f98-73c40cb9071f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.430533] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1805.430533] env[62525]: value = "task-1782067" [ 1805.430533] env[62525]: _type = "Task" [ 1805.430533] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.438664] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.493173] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.292s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.493404] env[62525]: INFO nova.compute.manager [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Migrating [ 1805.527688] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782063, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.672524] env[62525]: DEBUG nova.network.neutron [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1805.759371] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782065, 'name': ReconfigVM_Task, 'duration_secs': 0.26631} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.759673] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1805.764305] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc397f3c-5d35-4192-ba90-760773f399de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.785660] env[62525]: DEBUG oslo_vmware.api [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Task: {'id': task-1782066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228238} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.786884] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1805.787096] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1805.787280] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1805.787539] env[62525]: INFO nova.compute.manager [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1805.787770] env[62525]: DEBUG oslo.service.loopingcall [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.788032] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1805.788032] env[62525]: value = "task-1782071" [ 1805.788032] env[62525]: _type = "Task" [ 1805.788032] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.788228] env[62525]: DEBUG nova.compute.manager [-] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1805.788365] env[62525]: DEBUG nova.network.neutron [-] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1805.797637] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782071, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.834058] env[62525]: DEBUG nova.network.neutron [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Updating instance_info_cache with network_info: [{"id": "84be5220-9eed-41b9-8882-cb2acc60aa3d", "address": "fa:16:3e:62:8d:13", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84be5220-9e", "ovs_interfaceid": "84be5220-9eed-41b9-8882-cb2acc60aa3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.903548] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.044s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.940915] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd 
tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782067, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.009559] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.009763] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.009949] env[62525]: DEBUG nova.network.neutron [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1806.029086] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782063, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.085958] env[62525]: DEBUG nova.compute.manager [req-a12ef948-7a29-4150-8d0d-bd601b24edd7 req-ddacd27b-4b13-4d05-88cc-5bd8f809a87d service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Received event network-vif-deleted-3edc6d99-8711-4b37-869a-4e1238dc7a5a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1806.086221] env[62525]: INFO nova.compute.manager [req-a12ef948-7a29-4150-8d0d-bd601b24edd7 req-ddacd27b-4b13-4d05-88cc-5bd8f809a87d service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Neutron deleted interface 3edc6d99-8711-4b37-869a-4e1238dc7a5a; detaching it from the instance and deleting it from the info cache [ 1806.086389] env[62525]: DEBUG nova.network.neutron [req-a12ef948-7a29-4150-8d0d-bd601b24edd7 req-ddacd27b-4b13-4d05-88cc-5bd8f809a87d service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.299517] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782071, 'name': ReconfigVM_Task, 'duration_secs': 0.171343} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.299838] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1806.336811] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "refresh_cache-61fa8887-db88-4adc-8c3f-ffc78e0e550d" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.337181] env[62525]: DEBUG nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Instance network_info: |[{"id": "84be5220-9eed-41b9-8882-cb2acc60aa3d", "address": "fa:16:3e:62:8d:13", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84be5220-9e", "ovs_interfaceid": "84be5220-9eed-41b9-8882-cb2acc60aa3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1806.337682] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:8d:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84be5220-9eed-41b9-8882-cb2acc60aa3d', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1806.345147] env[62525]: DEBUG oslo.service.loopingcall [None 
req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1806.345373] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1806.345617] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e20e5a23-c10a-42c0-98f9-6730b71dc615 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.365722] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1806.365722] env[62525]: value = "task-1782072" [ 1806.365722] env[62525]: _type = "Task" [ 1806.365722] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.373743] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782072, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.441773] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782067, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817568} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.442107] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/69a1093a-95d7-4cbb-90bf-1a213470872a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1806.442376] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1806.442681] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c4f82ec-0b6b-4335-8d17-7fb4b3b4c1e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.449174] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1806.449174] env[62525]: value = "task-1782073" [ 1806.449174] env[62525]: _type = "Task" [ 1806.449174] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.457889] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782073, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.528021] env[62525]: DEBUG oslo_vmware.api [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782063, 'name': PowerOnVM_Task, 'duration_secs': 1.283126} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.528385] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1806.528518] env[62525]: INFO nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Took 8.29 seconds to spawn the instance on the hypervisor. [ 1806.528698] env[62525]: DEBUG nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1806.529576] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986a7ee5-86ae-4756-b0db-65a32ff8eb9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.563501] env[62525]: DEBUG nova.network.neutron [-] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.591369] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9caf7dd0-6250-4bae-be57-0205922cb752 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.601977] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a919a6b-e7f6-42cf-b9ed-1576b9c65fc9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.638319] env[62525]: DEBUG nova.compute.manager [req-a12ef948-7a29-4150-8d0d-bd601b24edd7 req-ddacd27b-4b13-4d05-88cc-5bd8f809a87d service nova] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Detach interface failed, port_id=3edc6d99-8711-4b37-869a-4e1238dc7a5a, reason: Instance 462bc19d-1eaa-4c57-8ebb-412a97614f03 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1806.737056] env[62525]: DEBUG nova.network.neutron [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance_info_cache with network_info: [{"id": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "address": "fa:16:3e:44:65:d2", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c0d4e5d-c0", "ovs_interfaceid": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.875214] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782072, 'name': CreateVM_Task, 'duration_secs': 0.465124} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.875382] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1806.876030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.876206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.876535] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1806.876773] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c9f6bd5-db61-4f50-a490-7dfec3a9bf63 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.881833] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1806.881833] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528b0d53-a66e-959e-5658-76d92db6189e" [ 1806.881833] env[62525]: _type = "Task" [ 1806.881833] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.889225] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528b0d53-a66e-959e-5658-76d92db6189e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.968975] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070278} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.969458] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1806.970808] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0664608a-800d-43e5-ade7-6b72bc523f69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.002049] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/69a1093a-95d7-4cbb-90bf-1a213470872a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1807.002294] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f968cf87-bf1f-488b-87b5-11a4ccd15a5a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.022183] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1807.022183] env[62525]: value = "task-1782074" [ 1807.022183] env[62525]: _type = "Task" [ 1807.022183] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.030982] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782074, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.048123] env[62525]: INFO nova.compute.manager [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Took 22.14 seconds to build instance. [ 1807.067802] env[62525]: INFO nova.compute.manager [-] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Took 1.28 seconds to deallocate network for instance. 
[ 1807.240463] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.351251] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1807.351561] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32ac72d8-ed13-4be1-b9ea-9b59a62c00f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.358747] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1807.358747] env[62525]: value = "task-1782075" [ 1807.358747] env[62525]: _type = "Task" [ 1807.358747] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.366630] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.392859] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]528b0d53-a66e-959e-5658-76d92db6189e, 'name': SearchDatastore_Task, 'duration_secs': 0.022333} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.393211] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.393377] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1807.393633] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.393792] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.393973] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.394249] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70f93f80-862d-40b0-b1c9-2b0d5f7fab28 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.412388] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.412577] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.413441] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baad7550-433d-4280-9191-eeb8a6b61e24 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.419279] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1807.419279] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f06c03-eef8-935c-fffa-351cd943f3bf" [ 1807.419279] env[62525]: _type = "Task" [ 1807.419279] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.427977] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f06c03-eef8-935c-fffa-351cd943f3bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.534213] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782074, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.553911] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0cdf38bc-a2cd-4c0f-b0a9-6855a6bdcccd tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.661s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.575393] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.575671] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.575964] env[62525]: DEBUG nova.objects.instance [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lazy-loading 'resources' on Instance uuid 462bc19d-1eaa-4c57-8ebb-412a97614f03 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1807.869540] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 
tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1807.869787] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1807.869985] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1807.870779] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b191e5-031e-40f0-aa89-0f11b49bfe4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.889231] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edcdd02-731f-4a7b-b5ca-49bedffeae57 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.895385] env[62525]: WARNING nova.virt.vmwareapi.driver [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1807.895670] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1807.896402] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5708f845-ca85-4a86-a157-0534433ba38a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.902701] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1807.902902] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d282898b-e093-4a96-bb75-e495daa1a198 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.929441] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f06c03-eef8-935c-fffa-351cd943f3bf, 'name': SearchDatastore_Task, 'duration_secs': 0.014544} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.930264] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35b8dd09-3d28-434d-95ef-9bdaa74519b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.935870] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1807.935870] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52745a39-b92f-0296-1a4f-770216e0f6e1" [ 1807.935870] env[62525]: _type = "Task" [ 1807.935870] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.943638] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52745a39-b92f-0296-1a4f-770216e0f6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.031914] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782074, 'name': ReconfigVM_Task, 'duration_secs': 0.533136} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.033046] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/69a1093a-95d7-4cbb-90bf-1a213470872a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1808.033721] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1808.033929] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1808.034141] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleting the datastore file [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1808.034373] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a5ddd3b-c3ca-4ea7-b335-455d3668833d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.035946] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8acd0bd7-927e-4866-b4bb-591ff0a5dbaa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.041398] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1808.041398] env[62525]: value = "task-1782079" [ 1808.041398] env[62525]: _type = "Task" [ 1808.041398] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.042521] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1808.042521] env[62525]: value = "task-1782078" [ 1808.042521] env[62525]: _type = "Task" [ 1808.042521] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.052582] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782079, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.055485] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782078, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.299848] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffec6bdc-ee53-4b66-a16f-9e183c7c34da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.308318] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d1b3ee-5873-43e9-aa77-5a455fa64356 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.341782] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e07be23-f5dc-42ee-9a23-13ca9051172e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.349723] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8f54ba-12b7-4654-8cde-99494cd0f54b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.363936] env[62525]: DEBUG nova.compute.provider_tree [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1808.415450] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "9fcec068-4921-4a42-b948-6e61a44658ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.415795] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "9fcec068-4921-4a42-b948-6e61a44658ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.415974] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "9fcec068-4921-4a42-b948-6e61a44658ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.416202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock 
"9fcec068-4921-4a42-b948-6e61a44658ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.416384] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "9fcec068-4921-4a42-b948-6e61a44658ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.418605] env[62525]: INFO nova.compute.manager [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Terminating instance [ 1808.420381] env[62525]: DEBUG nova.compute.manager [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1808.420577] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1808.421426] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d014af3-018b-48f6-bbdf-84192aa98f75 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.429288] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1808.429609] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8827f68-cd0f-4e6c-b222-155a652788f2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.435827] env[62525]: DEBUG oslo_vmware.api [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1808.435827] env[62525]: value = "task-1782080" [ 1808.435827] env[62525]: _type = "Task" [ 1808.435827] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.448196] env[62525]: DEBUG oslo_vmware.api [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782080, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.451859] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52745a39-b92f-0296-1a4f-770216e0f6e1, 'name': SearchDatastore_Task, 'duration_secs': 0.033715} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.452154] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.452424] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 61fa8887-db88-4adc-8c3f-ffc78e0e550d/61fa8887-db88-4adc-8c3f-ffc78e0e550d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1808.452715] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f9e7319-6cca-4b5d-81a3-bc93d7cb0b2c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.459503] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1808.459503] env[62525]: value = "task-1782081" [ 1808.459503] env[62525]: _type = "Task" [ 1808.459503] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.467670] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.559159] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782078, 'name': Rename_Task, 'duration_secs': 0.184873} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.559594] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263546} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.559863] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1808.560187] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1808.560422] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1808.560714] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1808.563528] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d73b50e-9bce-4322-b690-69a66092a750 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.570865] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1808.570865] env[62525]: value = "task-1782082" [ 1808.570865] env[62525]: _type = "Task" [ 1808.570865] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.580738] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782082, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.758642] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4e39c5-68b6-4f41-981c-d0ad79fb32d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.787800] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance '5a40ca03-f61c-4232-80dc-7a745a34bc67' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1808.867563] env[62525]: DEBUG nova.scheduler.client.report [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1808.947067] env[62525]: DEBUG oslo_vmware.api [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782080, 'name': PowerOffVM_Task, 'duration_secs': 0.211075} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.947391] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1808.947591] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1808.948225] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54f8eafa-046a-4b7b-8524-374485182a8a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.969458] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782081, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.067536] env[62525]: INFO nova.virt.block_device [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Booting with volume d818f996-4266-47c4-ab1e-9827cfc22a7d at /dev/sdb [ 1809.082477] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782082, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.105656] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00fdbe8a-07e9-4f0f-8686-ebb4e74c29aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.117966] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be58e607-5fc7-4f8e-a3e4-a80462da3908 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.149679] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1809.150095] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1809.150438] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleting the datastore file [datastore1] 9fcec068-4921-4a42-b948-6e61a44658ce {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1809.160881] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cd8570c-ef55-4e3c-8927-82061f5ddc40 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.163024] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aba4d83b-a0be-43b9-9d5b-ee105645d77f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.172829] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c10293-a4f8-489e-b6ea-9775a59d9695 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.184298] env[62525]: DEBUG oslo_vmware.api [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1809.184298] env[62525]: value = 
"task-1782084" [ 1809.184298] env[62525]: _type = "Task" [ 1809.184298] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.206635] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85934fa-d0b8-4f72-bad7-a3ca38c80378 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.209147] env[62525]: DEBUG oslo_vmware.api [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782084, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.213296] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2837567-2011-41c8-a07f-680ff5ae3b01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.226391] env[62525]: DEBUG nova.virt.block_device [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updating existing volume attachment record: 03da3655-3d50-4297-aafe-75ec00f3477f {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1809.295700] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1809.296069] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20f6f519-940d-480b-a6f8-b589413cb2da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.303306] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1809.303306] env[62525]: value = "task-1782085" [ 1809.303306] env[62525]: _type = "Task" [ 1809.303306] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.311804] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782085, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.372972] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.797s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.393974] env[62525]: INFO nova.scheduler.client.report [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Deleted allocations for instance 462bc19d-1eaa-4c57-8ebb-412a97614f03 [ 1809.470942] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640062} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.471248] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 61fa8887-db88-4adc-8c3f-ffc78e0e550d/61fa8887-db88-4adc-8c3f-ffc78e0e550d.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1809.471525] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1809.471801] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed2e45de-00b7-4546-bb41-11a490d5f119 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.478263] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1809.478263] env[62525]: value = "task-1782086" [ 1809.478263] env[62525]: _type = "Task" [ 1809.478263] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.487620] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.581295] env[62525]: DEBUG oslo_vmware.api [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782082, 'name': PowerOnVM_Task, 'duration_secs': 0.520396} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.581657] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1809.581919] env[62525]: INFO nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Took 9.02 seconds to spawn the instance on the hypervisor. [ 1809.582133] env[62525]: DEBUG nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1809.583017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d7bca5-7ae5-4233-ae29-2455e5730ac6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.693882] env[62525]: DEBUG oslo_vmware.api [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.333789} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.693882] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1809.694090] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1809.694272] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1809.694440] env[62525]: INFO nova.compute.manager [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1809.694678] env[62525]: DEBUG oslo.service.loopingcall [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1809.694872] env[62525]: DEBUG nova.compute.manager [-] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1809.694954] env[62525]: DEBUG nova.network.neutron [-] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1809.813383] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782085, 'name': PowerOffVM_Task, 'duration_secs': 0.281375} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.813652] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1809.813865] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance '5a40ca03-f61c-4232-80dc-7a745a34bc67' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1809.905090] env[62525]: DEBUG oslo_concurrency.lockutils [None req-53a757cd-0217-4900-80af-c7da52ec7103 tempest-MigrationsAdminTest-1000348462 tempest-MigrationsAdminTest-1000348462-project-member] Lock "462bc19d-1eaa-4c57-8ebb-412a97614f03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.384s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.987891] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086075} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.988190] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1809.988972] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c500cb-1ae8-460d-8656-c7f226be7af4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.011788] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 61fa8887-db88-4adc-8c3f-ffc78e0e550d/61fa8887-db88-4adc-8c3f-ffc78e0e550d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1810.012065] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da452686-ad53-4794-bae3-4053f6d46c39 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.031914] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1810.031914] env[62525]: value = "task-1782087" [ 1810.031914] env[62525]: _type = "Task" [ 1810.031914] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.040631] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.104477] env[62525]: INFO nova.compute.manager [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Took 22.76 seconds to build instance. 
[ 1810.114922] env[62525]: DEBUG nova.compute.manager [req-24481814-d9a9-4429-9a03-1c9529b032c6 req-086dff81-d883-43dc-a377-a1345958a018 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Received event network-vif-deleted-8b36d9bf-8d2a-449b-9b2e-813c8c5a1593 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1810.115150] env[62525]: INFO nova.compute.manager [req-24481814-d9a9-4429-9a03-1c9529b032c6 req-086dff81-d883-43dc-a377-a1345958a018 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Neutron deleted interface 8b36d9bf-8d2a-449b-9b2e-813c8c5a1593; detaching it from the instance and deleting it from the info cache [ 1810.115331] env[62525]: DEBUG nova.network.neutron [req-24481814-d9a9-4429-9a03-1c9529b032c6 req-086dff81-d883-43dc-a377-a1345958a018 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.321888] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1810.322169] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1810.322512] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1810.322512] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1810.322677] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1810.322843] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1810.323068] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1810.323240] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1810.323408] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1810.323571] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1810.323745] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1810.329466] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32652927-2e02-440b-834d-b7e719881c54 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.347797] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1810.347797] env[62525]: value = "task-1782088" [ 1810.347797] env[62525]: _type = "Task" [ 1810.347797] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.356369] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782088, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.451890] env[62525]: DEBUG nova.network.neutron [-] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.544162] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782087, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.607829] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ae528ae2-ef87-4bc9-a21a-0c3712077dbd tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.276s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.622193] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1e49523-2a33-4a77-a952-977a78dd2b49 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.632042] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d0de03-b0af-4d38-99a9-f50083f1c30a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.666734] env[62525]: DEBUG nova.compute.manager [req-24481814-d9a9-4429-9a03-1c9529b032c6 req-086dff81-d883-43dc-a377-a1345958a018 service nova] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Detach interface failed, port_id=8b36d9bf-8d2a-449b-9b2e-813c8c5a1593, reason: Instance 9fcec068-4921-4a42-b948-6e61a44658ce could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1810.717648] env[62525]: INFO nova.compute.manager [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Rescuing [ 1810.717784] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.717916] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.718088] env[62525]: DEBUG nova.network.neutron [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1810.858142] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782088, 'name': ReconfigVM_Task, 'duration_secs': 0.430207} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.858465] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance '5a40ca03-f61c-4232-80dc-7a745a34bc67' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1810.958766] env[62525]: INFO nova.compute.manager [-] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Took 1.26 seconds to deallocate network for instance. [ 1810.995168] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.995465] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.995737] env[62525]: DEBUG nova.objects.instance [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'pci_requests' on Instance uuid cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.043653] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782087, 'name': ReconfigVM_Task, 'duration_secs': 0.850431} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.043993] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 61fa8887-db88-4adc-8c3f-ffc78e0e550d/61fa8887-db88-4adc-8c3f-ffc78e0e550d.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1811.044671] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-daee8113-40e3-4129-8cb8-b8f8b2bb6557 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.050256] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1811.050256] env[62525]: value = "task-1782089" [ 1811.050256] env[62525]: _type = "Task" [ 1811.050256] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.057729] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782089, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.347069] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1811.347381] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1811.347598] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1811.347829] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1811.347987] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1811.348157] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1811.348377] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1811.348539] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1811.348756] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1811.348930] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1811.349153] env[62525]: DEBUG nova.virt.hardware [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1811.350098] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e554ab-9cb6-4eb3-97db-2068b3f72070 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.359134] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91551c30-cf24-4233-8294-adf159b2021e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.366673] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1811.366917] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1811.367085] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1811.367267] env[62525]: DEBUG nova.virt.hardware [None 
req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1811.367414] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1811.367597] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1811.367819] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1811.368046] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1811.368202] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1811.368369] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1811.368541] env[62525]: DEBUG nova.virt.hardware [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1811.373930] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1811.374242] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c22e339-3547-4102-be97-ba9ff3a084bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.399026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 
tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:71:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd205d712-e184-43b0-93aa-3e45e7674f76', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1811.407979] env[62525]: DEBUG oslo.service.loopingcall [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1811.408137] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1811.408355] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1811.408355] env[62525]: value = "task-1782090" [ 1811.408355] env[62525]: _type = "Task" [ 1811.408355] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.408629] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caf8b609-4f0f-40e1-ba90-eb0e1938cc40 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.433929] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.435149] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1811.435149] env[62525]: value = "task-1782091" [ 1811.435149] env[62525]: _type = "Task" [ 1811.435149] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.444525] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782091, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.467671] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.500056] env[62525]: DEBUG nova.objects.instance [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'numa_topology' on Instance uuid cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.561096] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782089, 'name': Rename_Task, 'duration_secs': 0.339967} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.561489] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1811.561764] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2a6608d-e833-4c5e-bbc2-d5c00e10f9bd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.564447] env[62525]: DEBUG nova.network.neutron [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Updating instance_info_cache with network_info: [{"id": "9fb5fc43-3369-489c-829b-506754512d51", "address": "fa:16:3e:98:ad:28", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fb5fc43-33", "ovs_interfaceid": "9fb5fc43-3369-489c-829b-506754512d51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.570956] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 
tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1811.570956] env[62525]: value = "task-1782092" [ 1811.570956] env[62525]: _type = "Task" [ 1811.570956] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.582641] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.934599] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782090, 'name': ReconfigVM_Task, 'duration_secs': 0.234099} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.934961] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1811.935844] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d484e1ab-1f82-4a66-9a04-287e8bd1c147 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.972122] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 5a40ca03-f61c-4232-80dc-7a745a34bc67/5a40ca03-f61c-4232-80dc-7a745a34bc67.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1811.976996] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01ccfe9f-5ff4-4f17-abd9-cd7e497c2170 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.992554] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782091, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.998265] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1811.998265] env[62525]: value = "task-1782093" [ 1811.998265] env[62525]: _type = "Task" [ 1811.998265] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.002696] env[62525]: INFO nova.compute.claims [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1812.013201] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782093, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.067568] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "refresh_cache-69a1093a-95d7-4cbb-90bf-1a213470872a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.085724] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782092, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.449721] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782091, 'name': CreateVM_Task, 'duration_secs': 0.621611} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.450034] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1812.450748] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.450991] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.451343] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1812.451619] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3e2b270-b81a-4f0e-83cf-a3d4b0ac9b79 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.458179] env[62525]: DEBUG oslo_vmware.api 
[None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1812.458179] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526f9a87-241c-1dc9-361f-5958d8817f76" [ 1812.458179] env[62525]: _type = "Task" [ 1812.458179] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.467699] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526f9a87-241c-1dc9-361f-5958d8817f76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.512804] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782093, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.580692] env[62525]: DEBUG oslo_vmware.api [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782092, 'name': PowerOnVM_Task, 'duration_secs': 0.64123} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.580983] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1812.581216] env[62525]: INFO nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Took 9.69 seconds to spawn the instance on the hypervisor. 
[ 1812.581405] env[62525]: DEBUG nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1812.582291] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3bae7b-8fa7-4e91-8ce1-db443442fe45 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.602951] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1812.603273] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50925fd8-47f5-4c6e-8304-2ca711354127 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.610147] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1812.610147] env[62525]: value = "task-1782094" [ 1812.610147] env[62525]: _type = "Task" [ 1812.610147] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.619299] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782094, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.969102] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526f9a87-241c-1dc9-361f-5958d8817f76, 'name': SearchDatastore_Task, 'duration_secs': 0.013914} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.969441] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.969717] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1812.969963] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.970128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.970315] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1812.970604] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd1c671e-d537-4a45-a05b-f3d0b6887b89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.980726] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1812.980726] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1812.981285] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a37f946-77f5-4982-a1e9-06b5ab7cec7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.989309] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1812.989309] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]521a7674-5699-bcb1-76b9-17bca3db2c20" [ 1812.989309] env[62525]: _type = "Task" [ 1812.989309] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.000241] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a7674-5699-bcb1-76b9-17bca3db2c20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.008320] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782093, 'name': ReconfigVM_Task, 'duration_secs': 0.522209} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.008603] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 5a40ca03-f61c-4232-80dc-7a745a34bc67/5a40ca03-f61c-4232-80dc-7a745a34bc67.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1813.009124] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance '5a40ca03-f61c-4232-80dc-7a745a34bc67' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1813.102254] env[62525]: INFO nova.compute.manager [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Took 23.32 seconds to build instance. [ 1813.123621] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782094, 'name': PowerOffVM_Task, 'duration_secs': 0.371972} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.123905] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1813.129019] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf85952-fcf9-43f7-9e50-2214538207da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.154455] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b661d92e-c470-483a-b6c1-a1df11ed1108 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.200152] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1813.200461] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f6cb117-55ed-4bb7-8e47-b261348ea8ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.211016] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1813.211016] env[62525]: value = "task-1782095" [ 1813.211016] env[62525]: _type = "Task" [ 1813.211016] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.224402] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1813.224672] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1813.224894] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.263111] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad2dbf8-dc81-4d2d-882d-813d20a2ce7a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.270802] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9473ee9f-e2b8-4bed-9f67-dbbc6095665c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.303721] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d4ac33-a4fb-446a-b992-aec4ec5fdfac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.311648] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7f305a-8b0f-4a8e-94f6-7132a4e67fa7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.325800] env[62525]: DEBUG nova.compute.provider_tree [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.499162] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]521a7674-5699-bcb1-76b9-17bca3db2c20, 'name': SearchDatastore_Task, 'duration_secs': 0.013115} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.500090] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-258ce9a5-bb0f-4a0d-83bd-2eeae615b577 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.506438] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1813.506438] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526dfebd-42e5-da3d-5e7a-57621c09d254" [ 1813.506438] env[62525]: _type = "Task" [ 1813.506438] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.520021] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526dfebd-42e5-da3d-5e7a-57621c09d254, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.520021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3446ca7d-4cd5-4f74-b319-2c6232b70050 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.537782] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b3288e-2b41-4d03-9f55-5e20132f1527 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.556108] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance '5a40ca03-f61c-4232-80dc-7a745a34bc67' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1813.603993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f53f00a-0fdf-4fd8-b4ad-ca83d6e7d357 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.833s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.832618] env[62525]: DEBUG nova.scheduler.client.report [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1814.017094] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526dfebd-42e5-da3d-5e7a-57621c09d254, 'name': SearchDatastore_Task, 'duration_secs': 0.014747} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.017365] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.017650] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1814.017950] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.018156] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1814.018380] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a24f132e-e05a-4b91-b687-d8bf57ed78dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.020269] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96a555d4-bf0c-422d-b592-0160b127a74e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.028148] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1814.028148] env[62525]: value = "task-1782096" [ 1814.028148] env[62525]: _type = "Task" [ 1814.028148] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.031883] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1814.032096] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1814.033071] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-665a7dd7-21d1-41e5-bed7-26978b64b9a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.038038] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782096, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.040876] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1814.040876] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52373b43-ecf7-09e1-2f24-5ad84d5e49b1" [ 1814.040876] env[62525]: _type = "Task" [ 1814.040876] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.047783] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52373b43-ecf7-09e1-2f24-5ad84d5e49b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.099030] env[62525]: DEBUG nova.network.neutron [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Port 6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1814.250500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.250840] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.299876] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "42d14e44-44d6-46de-84e3-049a2d7e84f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.300124] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.337273] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.342s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.339557] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.872s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.339823] env[62525]: DEBUG nova.objects.instance [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lazy-loading 'resources' on Instance uuid 
9fcec068-4921-4a42-b948-6e61a44658ce {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1814.368748] env[62525]: INFO nova.network.neutron [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating port b40cb3df-4673-45d7-8b69-c642a8939d96 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1814.539149] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782096, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.551060] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52373b43-ecf7-09e1-2f24-5ad84d5e49b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010515} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.551779] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dad6c3d2-a829-446d-a1a2-b3056244c46f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.557462] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1814.557462] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5248fdaa-6ffe-4cc9-dcf0-904a1e0ca266" [ 1814.557462] env[62525]: _type = "Task" [ 1814.557462] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.567203] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5248fdaa-6ffe-4cc9-dcf0-904a1e0ca266, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.753332] env[62525]: DEBUG nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1814.802204] env[62525]: DEBUG nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1815.042747] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782096, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.794715} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.043091] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1815.043355] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1815.043625] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14551554-f865-42fc-8914-ef835f31adcd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.053816] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1815.053816] env[62525]: value = "task-1782097" [ 1815.053816] env[62525]: _type = "Task" [ 1815.053816] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.059016] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2255d6a0-983f-4efa-bf79-ddb0368fe976 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.069274] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.076268] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3262204c-be8a-4963-abe6-7fdbedf74939 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.079573] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5248fdaa-6ffe-4cc9-dcf0-904a1e0ca266, 'name': SearchDatastore_Task, 'duration_secs': 0.051306} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.079896] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.080215] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. {{(pid=62525) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1815.080831] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb2567d4-7ec4-4f38-a87e-8ac9d72bc0e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.121348] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d94e4b6-d451-4227-a018-9290542da79e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.124258] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1815.124258] env[62525]: value = "task-1782098" [ 1815.124258] env[62525]: _type = "Task" [ 1815.124258] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.128730] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.128984] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.129140] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.137388] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d56a31f-111b-4519-b4fd-c3f59b0be5fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.147454] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782098, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.157933] env[62525]: DEBUG nova.compute.provider_tree [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1815.273562] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.325541] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.564225] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.169674} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.564439] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1815.565256] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfc0ebd-f938-4ef7-8c71-99bcd6a0e854 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.587772] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1815.588077] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54dca61a-8ad9-4677-95a2-286f23191067 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.607879] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1815.607879] env[62525]: value = "task-1782099" [ 1815.607879] env[62525]: _type = "Task" [ 1815.607879] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.615915] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782099, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.640496] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.661489] env[62525]: DEBUG nova.scheduler.client.report [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1815.820887] env[62525]: DEBUG nova.compute.manager [req-9ae330f9-07eb-4f36-a440-2a90ad2775e6 req-ebb0c32a-a79b-4c76-968e-6871ff7deaa2 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-vif-plugged-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1815.821195] env[62525]: DEBUG oslo_concurrency.lockutils [req-9ae330f9-07eb-4f36-a440-2a90ad2775e6 req-ebb0c32a-a79b-4c76-968e-6871ff7deaa2 service nova] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.821376] env[62525]: DEBUG oslo_concurrency.lockutils [req-9ae330f9-07eb-4f36-a440-2a90ad2775e6 req-ebb0c32a-a79b-4c76-968e-6871ff7deaa2 service nova] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.821547] env[62525]: DEBUG oslo_concurrency.lockutils [req-9ae330f9-07eb-4f36-a440-2a90ad2775e6 req-ebb0c32a-a79b-4c76-968e-6871ff7deaa2 service nova] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.821720] env[62525]: DEBUG nova.compute.manager [req-9ae330f9-07eb-4f36-a440-2a90ad2775e6 req-ebb0c32a-a79b-4c76-968e-6871ff7deaa2 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] No waiting events found dispatching network-vif-plugged-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1815.821861] env[62525]: WARNING nova.compute.manager [req-9ae330f9-07eb-4f36-a440-2a90ad2775e6 req-ebb0c32a-a79b-4c76-968e-6871ff7deaa2 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received unexpected event network-vif-plugged-b40cb3df-4673-45d7-8b69-c642a8939d96 for instance with vm_state shelved_offloaded and task_state spawning. [ 1815.922253] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.922484] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.922624] env[62525]: DEBUG nova.network.neutron [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1816.118845] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782099, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.144813] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782098, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.166106] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.826s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.168826] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.895s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.170529] env[62525]: INFO nova.compute.claims [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1816.174099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.174284] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.174465] env[62525]: DEBUG nova.network.neutron [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1816.192266] env[62525]: INFO nova.scheduler.client.report [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleted allocations for instance 9fcec068-4921-4a42-b948-6e61a44658ce [ 1816.621793] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782099, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.640268] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782098, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.183133} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.640536] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. [ 1816.641339] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3e5b0c-04e9-439a-b7d8-f6b52bb0ee60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.666770] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1816.667924] env[62525]: DEBUG nova.network.neutron [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb40cb3df-46", "ovs_interfaceid": "b40cb3df-4673-45d7-8b69-c642a8939d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.669172] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-559f3e8f-d676-4485-ae36-cef640713a8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.693042] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1816.693042] env[62525]: value = "task-1782100" [ 
1816.693042] env[62525]: _type = "Task" [ 1816.693042] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.702561] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782100, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.702993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e9db9be2-a405-495e-8cc5-fbdaf9e9d571 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "9fcec068-4921-4a42-b948-6e61a44658ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.287s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.916942] env[62525]: DEBUG nova.network.neutron [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance_info_cache with network_info: [{"id": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "address": "fa:16:3e:44:65:d2", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c0d4e5d-c0", "ovs_interfaceid": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.118874] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782099, 'name': ReconfigVM_Task, 'duration_secs': 1.304888} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.119111] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb/50ee564d-7b27-4bc4-a95e-7717de865cfb.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1817.120472] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'disk_bus': None, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'encrypted': False, 'guest_format': None, 'encryption_format': None, 'encryption_options': None, 'boot_index': 0, 'device_type': 'disk', 'image_id': 'a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'}, 'device_type': None, 'mount_device': '/dev/sdb', 'attachment_id': '03da3655-3d50-4297-aafe-75ec00f3477f', 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=62525) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1817.120709] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1817.120885] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1817.121665] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c550c3bc-682b-45fe-a14e-7a3a0e65700a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.137225] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5897f45-070d-4912-8025-a03ea135f4ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.161905] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] volume-d818f996-4266-47c4-ab1e-9827cfc22a7d/volume-d818f996-4266-47c4-ab1e-9827cfc22a7d.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1817.162437] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3d9c3d9-6efc-49a1-9642-d2bf8c3108e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.181547] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1817.181547] env[62525]: value = "task-1782101" [ 1817.181547] env[62525]: _type = "Task" [ 1817.181547] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.185032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.203849] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782101, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.211941] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782100, 'name': ReconfigVM_Task, 'duration_secs': 0.329624} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.216806] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1817.217951] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18966490-e9d0-4d1b-9ee6-e661718a7eb9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.248636] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d6a2e2b2802032cd936cf5fe1b9d0aec',container_format='bare',created_at=2024-12-12T00:17:36Z,direct_url=,disk_format='vmdk',id=4bcdf024-5b6a-4101-8dee-23681d55ab37,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1623658510-shelved',owner='209b99adb38b4c8b9e5a277019dbe292',properties=ImageMetaProps,protected=,size=31670272,status='active',tags=,updated_at=2024-12-12T00:17:49Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1817.248942] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1817.249159] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1817.249402] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1817.249584] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] 
Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1817.249807] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1817.250071] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1817.250281] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1817.250493] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1817.250706] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1817.250936] env[62525]: DEBUG nova.virt.hardware [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1817.251283] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11c24c2f-9a0a-42d2-aaa0-560007fabb68 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.261942] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2497bade-cacc-4a9f-b0a3-cb52ed47b85f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.272198] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a05f504-ac58-4e31-a430-1cb6ba1a20c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.276009] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1817.276009] env[62525]: value = "task-1782102" [ 1817.276009] env[62525]: _type = "Task" [ 1817.276009] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.290134] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:4a:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b40cb3df-4673-45d7-8b69-c642a8939d96', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1817.298116] env[62525]: DEBUG oslo.service.loopingcall [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.301200] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1817.301615] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-616a80a1-1f1f-48cf-be01-1aec37362ec3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.321281] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.328130] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1817.328130] env[62525]: value = "task-1782103" [ 1817.328130] env[62525]: _type = "Task" [ 1817.328130] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.334209] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782103, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.420195] env[62525]: DEBUG oslo_concurrency.lockutils [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.453030] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabc0d91-f669-4975-b320-4871e6ccf4fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.463270] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2cadb0-9379-450a-b4d7-0b97695571bd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.496400] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b14442-7353-45a5-b3b1-48dade9885a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.505604] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a060fe7e-b530-49d1-8482-b3c052bb57ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.520995] env[62525]: DEBUG nova.compute.provider_tree [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1817.692028] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782101, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.788672] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782102, 'name': ReconfigVM_Task, 'duration_secs': 0.149495} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.788965] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1817.789248] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4a69d64-fcd1-4ab2-add9-c81895f9033e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.796123] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1817.796123] env[62525]: value = "task-1782104" [ 1817.796123] env[62525]: _type = "Task" [ 1817.796123] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.803890] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.837544] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782103, 'name': CreateVM_Task, 'duration_secs': 0.389821} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.837544] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1817.837684] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.838596] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.838596] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1817.838596] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6832cca3-2503-49ba-9339-21895b375fd2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.843206] 
env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1817.843206] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5256dffc-e70c-08b6-60d5-bfd8c7772f8a" [ 1817.843206] env[62525]: _type = "Task" [ 1817.843206] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.851087] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5256dffc-e70c-08b6-60d5-bfd8c7772f8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.853343] env[62525]: DEBUG nova.compute.manager [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1817.853530] env[62525]: DEBUG nova.compute.manager [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing instance network info cache due to event network-changed-b40cb3df-4673-45d7-8b69-c642a8939d96. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1817.853741] env[62525]: DEBUG oslo_concurrency.lockutils [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] Acquiring lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.853885] env[62525]: DEBUG oslo_concurrency.lockutils [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] Acquired lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.854058] env[62525]: DEBUG nova.network.neutron [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Refreshing network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1817.941261] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3118303-0406-4ec7-864a-bb076e48a2a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.962095] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57151d8-1153-4894-8adc-dcba13f81756 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.969833] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating 
instance '5a40ca03-f61c-4232-80dc-7a745a34bc67' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1818.024672] env[62525]: DEBUG nova.scheduler.client.report [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1818.038230] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.038502] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.038713] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.038896] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.039089] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.041250] env[62525]: INFO nova.compute.manager [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Terminating instance [ 
1818.043027] env[62525]: DEBUG nova.compute.manager [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1818.043232] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1818.044085] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc9746f-6775-430e-b78d-010cdeb4ec80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.052657] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1818.052902] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f3f638c-dcab-4adc-aacf-cc1468112398 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.060156] env[62525]: DEBUG oslo_vmware.api [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1818.060156] env[62525]: value = "task-1782105" [ 1818.060156] env[62525]: _type = "Task" [ 1818.060156] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.068698] env[62525]: DEBUG oslo_vmware.api [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782105, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.111676] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "467c6af1-2961-4213-8f0c-fe7591d93b5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.112017] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.112359] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "467c6af1-2961-4213-8f0c-fe7591d93b5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.112596] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.112805] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.115578] env[62525]: INFO nova.compute.manager [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Terminating instance [ 1818.117470] env[62525]: DEBUG nova.compute.manager [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1818.117673] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1818.118572] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e9c2f2-88f7-4c08-b782-d27fec374652 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.127360] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1818.127621] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c99ea6e6-7dc4-4a61-9eb8-fdc0437e0b47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.135801] env[62525]: DEBUG oslo_vmware.api [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1818.135801] env[62525]: value = "task-1782106" [ 1818.135801] env[62525]: _type = "Task" [ 1818.135801] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.142457] env[62525]: DEBUG oslo_vmware.api [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782106, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.191566] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782101, 'name': ReconfigVM_Task, 'duration_secs': 0.728901} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.191884] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfigured VM instance instance-00000061 to attach disk [datastore1] volume-d818f996-4266-47c4-ab1e-9827cfc22a7d/volume-d818f996-4266-47c4-ab1e-9827cfc22a7d.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1818.196700] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9194d4a-4c5e-42fb-a5f5-35b843d9a8d3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.212719] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1818.212719] env[62525]: value = "task-1782107" [ 1818.212719] env[62525]: _type = "Task" [ 1818.212719] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.221686] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782107, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.308034] env[62525]: DEBUG oslo_vmware.api [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782104, 'name': PowerOnVM_Task, 'duration_secs': 0.391518} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.308348] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1818.311789] env[62525]: DEBUG nova.compute.manager [None req-bf34b294-d8dc-42c9-b914-3f694ee643cf tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1818.312613] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307f3bca-9b82-422d-92b3-d70fcef537e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.354133] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.354552] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Processing image 4bcdf024-5b6a-4101-8dee-23681d55ab37 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1818.354643] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37/4bcdf024-5b6a-4101-8dee-23681d55ab37.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.354770] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37/4bcdf024-5b6a-4101-8dee-23681d55ab37.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.354950] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1818.355478] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-089b82c5-7f28-46a3-8b98-29c968d1df58 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.367911] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 
tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1818.368172] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1818.368931] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef7abf78-a085-4df4-bbe6-cb069bd9ef9d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.374509] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1818.374509] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5234fd3f-dbf9-e595-f1e0-6c44147d4a88" [ 1818.374509] env[62525]: _type = "Task" [ 1818.374509] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.383558] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5234fd3f-dbf9-e595-f1e0-6c44147d4a88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.476025] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1818.476326] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e5dcf0e-3492-48c4-8876-6317d615ddf1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.483856] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1818.483856] env[62525]: value = "task-1782108" [ 1818.483856] env[62525]: _type = "Task" [ 1818.483856] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.492337] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782108, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.529912] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.530451] env[62525]: DEBUG nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1818.533196] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.209s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.534656] env[62525]: INFO nova.compute.claims [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1818.570639] env[62525]: DEBUG oslo_vmware.api [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782105, 'name': PowerOffVM_Task, 'duration_secs': 0.24477} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.570961] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1818.571260] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1818.572073] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea89c374-aae3-4a03-86ae-6a86a356b9c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.593721] env[62525]: DEBUG nova.network.neutron [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updated VIF entry in instance network info cache for port b40cb3df-4673-45d7-8b69-c642a8939d96. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1818.594117] env[62525]: DEBUG nova.network.neutron [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [{"id": "b40cb3df-4673-45d7-8b69-c642a8939d96", "address": "fa:16:3e:c7:4a:49", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb40cb3df-46", "ovs_interfaceid": "b40cb3df-4673-45d7-8b69-c642a8939d96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.645026] env[62525]: DEBUG oslo_vmware.api [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782106, 'name': PowerOffVM_Task, 'duration_secs': 0.194349} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.645318] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1818.645601] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1818.645775] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20a22977-cca8-445a-b291-62ad80aaf34e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.694923] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1818.695184] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1818.695365] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleting the datastore file [datastore1] c14f3fb8-3090-4df3-9e78-57ee9d62921f {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1818.695630] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25db48c8-f750-41cf-805a-9716a71aa284 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.702171] env[62525]: DEBUG oslo_vmware.api [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1818.702171] env[62525]: value = "task-1782111" [ 1818.702171] env[62525]: _type = "Task" [ 1818.702171] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.711499] env[62525]: DEBUG oslo_vmware.api [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782111, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.712905] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1818.713142] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1818.713332] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleting the datastore file [datastore1] 467c6af1-2961-4213-8f0c-fe7591d93b5d {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1818.713652] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81a70785-0866-4241-9553-0d6b3e4be1c0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.724717] env[62525]: DEBUG oslo_vmware.api [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for the task: (returnval){ [ 1818.724717] env[62525]: value = "task-1782112" [ 1818.724717] env[62525]: _type = "Task" [ 1818.724717] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.724951] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782107, 'name': ReconfigVM_Task, 'duration_secs': 0.192341} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.725349] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1818.728639] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-606b1a18-9fb5-4952-b929-5e5a77e6778f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.735935] env[62525]: DEBUG oslo_vmware.api [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.737526] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1818.737526] env[62525]: value = "task-1782113" [ 1818.737526] env[62525]: _type = "Task" [ 1818.737526] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.745935] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782113, 'name': Rename_Task} progress is 6%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.887084] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Preparing fetch location {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1818.887356] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Fetch image to [datastore1] OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51/OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51.vmdk {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1818.887552] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Downloading stream optimized image 4bcdf024-5b6a-4101-8dee-23681d55ab37 to [datastore1] OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51/OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51.vmdk on the data store datastore1 as vApp {{(pid=62525) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1818.887852] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Downloading image file data 4bcdf024-5b6a-4101-8dee-23681d55ab37 to the ESX as VM named 'OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51' {{(pid=62525) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1818.962766] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1818.962766] env[62525]: value = "resgroup-9" [ 1818.962766] env[62525]: _type = "ResourcePool" [ 1818.962766] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1818.963178] env[62525]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c4f93de4-4d0c-4fa1-a15d-c20b1e1ea738 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.984648] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease: (returnval){ [ 1818.984648] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b554c2-fbeb-5e4c-3fa4-2fbdcb6d9ad4" [ 1818.984648] env[62525]: _type = "HttpNfcLease" [ 1818.984648] env[62525]: } obtained for vApp import into resource pool (val){ [ 1818.984648] env[62525]: value = "resgroup-9" [ 1818.984648] env[62525]: _type = "ResourcePool" [ 1818.984648] env[62525]: }. 
{{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1818.984977] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the lease: (returnval){ [ 1818.984977] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b554c2-fbeb-5e4c-3fa4-2fbdcb6d9ad4" [ 1818.984977] env[62525]: _type = "HttpNfcLease" [ 1818.984977] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1818.995871] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782108, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.997287] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1818.997287] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b554c2-fbeb-5e4c-3fa4-2fbdcb6d9ad4" [ 1818.997287] env[62525]: _type = "HttpNfcLease" [ 1818.997287] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1819.038854] env[62525]: DEBUG nova.compute.utils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1819.042467] env[62525]: DEBUG nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1819.042653] env[62525]: DEBUG nova.network.neutron [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1819.095548] env[62525]: DEBUG nova.policy [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59a07073c7534d17bfb2013552bbe0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c56f465d1a641a99458904c04137621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1819.097350] env[62525]: DEBUG oslo_concurrency.lockutils [req-8f2d13ab-b635-476f-915d-1cec464c3add req-23f48107-f11c-41a1-9c92-3b7e49ab43d9 service nova] Releasing lock "refresh_cache-cb043ab8-dff7-48c6-b50b-a4d77a01eb41" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.212955] env[62525]: DEBUG oslo_vmware.api [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171083} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.213307] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1819.213560] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1819.213819] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1819.214058] env[62525]: INFO nova.compute.manager [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 1819.214372] env[62525]: DEBUG oslo.service.loopingcall [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1819.214627] env[62525]: DEBUG nova.compute.manager [-] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1819.214757] env[62525]: DEBUG nova.network.neutron [-] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1819.237733] env[62525]: DEBUG oslo_vmware.api [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Task: {'id': task-1782112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17897} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.238266] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1819.238664] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1819.238967] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1819.239868] env[62525]: INFO nova.compute.manager [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1819.240254] env[62525]: DEBUG oslo.service.loopingcall [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1819.245033] env[62525]: DEBUG nova.compute.manager [-] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1819.245159] env[62525]: DEBUG nova.network.neutron [-] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1819.252310] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782113, 'name': Rename_Task, 'duration_secs': 0.188181} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.252598] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1819.252801] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-caf05279-fcdb-43ea-b857-4704658232be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.259507] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1819.259507] env[62525]: value = "task-1782115" [ 1819.259507] env[62525]: _type = "Task" [ 1819.259507] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.267722] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782115, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.405984] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "154ac489-69e4-41a8-90cf-b3d6196c4822" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.406317] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.510652] env[62525]: DEBUG oslo_vmware.api [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782108, 'name': PowerOnVM_Task, 'duration_secs': 0.793348} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.510816] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1819.510816] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b554c2-fbeb-5e4c-3fa4-2fbdcb6d9ad4" [ 1819.510816] env[62525]: _type = "HttpNfcLease" [ 1819.510816] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1819.511149] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1819.511464] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-30d6ceab-df55-40c6-83af-7be5906f6c5b tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance '5a40ca03-f61c-4232-80dc-7a745a34bc67' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1819.550559] env[62525]: DEBUG nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1819.605578] env[62525]: DEBUG nova.network.neutron [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Successfully created port: fa019fa8-6752-4b33-877c-63d55cadbf80 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1819.775120] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782115, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.797506] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da2b57e-731e-491c-b06c-5c0312f12558 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.805359] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd001d0a-dcd7-4f37-8378-58f93686ccf0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.844687] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71a7a3a-cbeb-4063-9917-a06a661a0dc5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.853901] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f58899-d8cd-4a2a-907c-151a0d5c8bda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.869688] env[62525]: DEBUG nova.compute.provider_tree [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.910214] env[62525]: DEBUG nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1819.918975] env[62525]: DEBUG nova.compute.manager [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Received event network-vif-deleted-3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1819.919282] env[62525]: INFO nova.compute.manager [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Neutron deleted interface 3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6; detaching it from the instance and deleting it from the info cache [ 1819.919530] env[62525]: DEBUG nova.network.neutron [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.001088] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1820.001088] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b554c2-fbeb-5e4c-3fa4-2fbdcb6d9ad4" [ 1820.001088] env[62525]: _type = "HttpNfcLease" [ 1820.001088] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1820.001088] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1820.001088] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b554c2-fbeb-5e4c-3fa4-2fbdcb6d9ad4" [ 1820.001088] env[62525]: _type = "HttpNfcLease" [ 1820.001088] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1820.001088] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3ca443-93ef-441e-9953-dd885bb34c69 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.008156] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529d9fe0-de25-8a9b-780f-c42a96b1a6f5/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1820.008445] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating HTTP connection to write to file with size = 31670272 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529d9fe0-de25-8a9b-780f-c42a96b1a6f5/disk-0.vmdk. 
{{(pid=62525) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1820.077595] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cfa06a9e-6fea-437f-bcc0-9e812ab456fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.270942] env[62525]: DEBUG oslo_vmware.api [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782115, 'name': PowerOnVM_Task, 'duration_secs': 0.773288} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.271247] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1820.271445] env[62525]: DEBUG nova.compute.manager [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1820.272242] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661cdd8c-8419-4668-a648-7651a428b2af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.343289] env[62525]: DEBUG nova.network.neutron [-] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.349226] env[62525]: DEBUG nova.network.neutron [-] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.373126] env[62525]: DEBUG nova.scheduler.client.report [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1820.422934] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72659fed-1a25-490d-9e4f-8882144276d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.432544] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265cc631-7752-47ca-ae59-e3ac2c4d1a03 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.443784] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.463723] env[62525]: DEBUG nova.compute.manager [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Detach interface failed, port_id=3c768ff1-29a5-42ad-b8f4-92d5fbfdbbe6, reason: Instance c14f3fb8-3090-4df3-9e78-57ee9d62921f could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1820.463950] env[62525]: DEBUG nova.compute.manager [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Received event network-vif-deleted-598582c7-f89c-4afd-acf4-81a44e028139 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1820.464219] env[62525]: INFO nova.compute.manager [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Neutron deleted interface 598582c7-f89c-4afd-acf4-81a44e028139; detaching it from the instance and deleting it from the info cache [ 1820.464419] env[62525]: DEBUG nova.network.neutron [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.582327] env[62525]: DEBUG nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1820.609766] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1820.610069] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1820.610230] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1820.610411] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1820.610555] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1820.610695] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1820.610899] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1820.611069] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1820.611236] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 
tempest-ServersTestJSON-1950281889-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1820.611404] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1820.611564] env[62525]: DEBUG nova.virt.hardware [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1820.612425] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5a761e-0183-4900-b87e-46f7fb7c7fae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.622161] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed35ff6-a17b-4b9b-8395-ce3910ecf14b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.787934] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.845606] env[62525]: INFO nova.compute.manager [-] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Took 1.63 seconds to deallocate network for instance. [ 1820.850954] env[62525]: INFO nova.compute.manager [-] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Took 1.61 seconds to deallocate network for instance. [ 1820.878584] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.879176] env[62525]: DEBUG nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1820.884391] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.441s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.885873] env[62525]: INFO nova.compute.claims [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1820.966425] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c04178a-0983-46be-a06c-9654dc0c4283 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.977204] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d84003-acf2-4c37-a388-941a013ac610 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.014892] env[62525]: DEBUG nova.compute.manager [req-efc7503e-7a43-4ccb-a596-8f6c3c181c6a req-7d1061e2-4ab4-4ff5-b7d8-fa5992713c89 service nova] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Detach interface failed, port_id=598582c7-f89c-4afd-acf4-81a44e028139, reason: Instance 467c6af1-2961-4213-8f0c-fe7591d93b5d could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1821.355133] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.359733] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.394987] env[62525]: DEBUG nova.compute.utils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1821.397118] env[62525]: DEBUG nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1821.397376] env[62525]: DEBUG nova.network.neutron [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1821.504816] env[62525]: DEBUG nova.policy [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '887b4b5be3e644a182ced389f3213be3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ea75b422b034b2b8bc55de69766ba75', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1821.517715] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Completed reading data from the image iterator. {{(pid=62525) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1821.517715] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529d9fe0-de25-8a9b-780f-c42a96b1a6f5/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1821.518837] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae06efc0-213e-4b8c-a4f8-69d0f5f30301 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.526492] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529d9fe0-de25-8a9b-780f-c42a96b1a6f5/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1821.526590] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529d9fe0-de25-8a9b-780f-c42a96b1a6f5/disk-0.vmdk. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1821.527417] env[62525]: DEBUG nova.network.neutron [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Successfully updated port: fa019fa8-6752-4b33-877c-63d55cadbf80 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1821.528948] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-09ce123d-8721-4c02-9880-6a43697f6bf6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.580286] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.580558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.580757] env[62525]: DEBUG nova.compute.manager [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Going to confirm migration 5 {{(pid=62525) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1821.616070] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.616253] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.764754] env[62525]: DEBUG nova.network.neutron [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Successfully created port: 5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1821.902323] env[62525]: DEBUG nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1821.944934] env[62525]: DEBUG nova.compute.manager [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Received event network-vif-plugged-fa019fa8-6752-4b33-877c-63d55cadbf80 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.945193] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] Acquiring lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.945398] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.945630] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.945740] env[62525]: DEBUG nova.compute.manager [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] No waiting events found dispatching network-vif-plugged-fa019fa8-6752-4b33-877c-63d55cadbf80 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1821.945915] env[62525]: WARNING nova.compute.manager [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Received unexpected event network-vif-plugged-fa019fa8-6752-4b33-877c-63d55cadbf80 for instance with vm_state building and task_state spawning. [ 1821.946137] env[62525]: DEBUG nova.compute.manager [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Received event network-changed-fa019fa8-6752-4b33-877c-63d55cadbf80 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.946247] env[62525]: DEBUG nova.compute.manager [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Refreshing instance network info cache due to event network-changed-fa019fa8-6752-4b33-877c-63d55cadbf80. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1821.946591] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] Acquiring lock "refresh_cache-b7768ee1-16f7-40f0-9f5f-28df4a1580f2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.946591] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] Acquired lock "refresh_cache-b7768ee1-16f7-40f0-9f5f-28df4a1580f2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.946769] env[62525]: DEBUG nova.network.neutron [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Refreshing network info cache for port fa019fa8-6752-4b33-877c-63d55cadbf80 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1821.999552] env[62525]: DEBUG oslo_vmware.rw_handles [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529d9fe0-de25-8a9b-780f-c42a96b1a6f5/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1822.000289] env[62525]: INFO nova.virt.vmwareapi.images [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Downloaded image file data 4bcdf024-5b6a-4101-8dee-23681d55ab37 [ 1822.000715] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78ab28b-f9ea-4a79-aa2e-fa82d6b9f250 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.022591] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1751c2d0-7801-4b34-8ba4-ac01fe06f819 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.032826] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "refresh_cache-b7768ee1-16f7-40f0-9f5f-28df4a1580f2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.079297] env[62525]: INFO nova.virt.vmwareapi.images [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] The imported VM was unregistered [ 1822.082692] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Caching image {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1822.082803] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 
tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating directory with path [datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.087767] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99e42aae-8f40-4963-b5df-70fa63429581 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.113898] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created directory with path [datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.113898] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51/OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51.vmdk to [datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37/4bcdf024-5b6a-4101-8dee-23681d55ab37.vmdk. {{(pid=62525) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1822.113898] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-285150f9-e187-4862-8aae-927a36631e33 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.117769] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1822.117769] env[62525]: value = "task-1782117" [ 1822.117769] env[62525]: _type = "Task" [ 1822.117769] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.133320] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782117, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.133536] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1822.133703] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1822.157064] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c908533-a49b-4d0c-9dd4-2538d8a72344 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.160881] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.161211] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.161476] env[62525]: DEBUG nova.network.neutron [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.161725] env[62525]: DEBUG nova.objects.instance [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'info_cache' on Instance uuid 5a40ca03-f61c-4232-80dc-7a745a34bc67 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1822.170133] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1048b182-3b8b-4f3b-9f09-ff924463ddc4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.200935] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051ef2f7-1617-409d-b529-8ada00e15c38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.208722] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13136eb6-1872-4f92-9a65-cdd61d1757a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.222894] env[62525]: DEBUG nova.compute.provider_tree [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.480954] env[62525]: DEBUG nova.network.neutron [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1822.557875] env[62525]: DEBUG nova.network.neutron [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.630050] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782117, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.726864] env[62525]: DEBUG nova.scheduler.client.report [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1822.913122] env[62525]: DEBUG nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1822.943014] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1822.943330] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1822.943505] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1822.943708] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1822.943881] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1822.944041] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1822.944283] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1822.944451] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1822.944646] env[62525]: DEBUG 
nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1822.944836] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1822.945026] env[62525]: DEBUG nova.virt.hardware [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1822.946031] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968f16d0-89ea-434a-a447-4a0a50869cb5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.956534] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee73335-964f-4064-a15b-c6e402a27a94 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.060760] env[62525]: DEBUG oslo_concurrency.lockutils [req-2e8439d1-6340-4fc7-84c4-4f2f8bdb6f8b req-45e42dff-a31b-439b-a484-dd8ff98b7714 service nova] Releasing lock "refresh_cache-b7768ee1-16f7-40f0-9f5f-28df4a1580f2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.062339] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "refresh_cache-b7768ee1-16f7-40f0-9f5f-28df4a1580f2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.062339] env[62525]: DEBUG nova.network.neutron [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1823.130125] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782117, 'name': MoveVirtualDisk_Task} progress is 32%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.231947] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.232493] env[62525]: DEBUG nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1823.239024] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.448s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.239024] env[62525]: DEBUG nova.objects.instance [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62525) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1823.425791] env[62525]: DEBUG nova.network.neutron [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Successfully updated port: 5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1823.474280] env[62525]: DEBUG nova.network.neutron [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance_info_cache with network_info: [{"id": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "address": "fa:16:3e:44:65:d2", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c0d4e5d-c0", "ovs_interfaceid": "6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.596165] env[62525]: DEBUG nova.network.neutron [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1823.630923] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782117, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.740270] env[62525]: DEBUG nova.compute.utils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1823.745771] env[62525]: DEBUG nova.network.neutron [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Updating instance_info_cache with network_info: [{"id": "fa019fa8-6752-4b33-877c-63d55cadbf80", "address": "fa:16:3e:55:82:bd", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa019fa8-67", "ovs_interfaceid": "fa019fa8-6752-4b33-877c-63d55cadbf80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.746859] env[62525]: DEBUG nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1823.747048] env[62525]: DEBUG nova.network.neutron [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1823.789836] env[62525]: DEBUG nova.policy [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1da85b41cee4803bb8d572bb37a84db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3fe67f961db46b9b3e2c37789829a2c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1823.930601] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.930601] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.930601] env[62525]: DEBUG nova.network.neutron [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1823.977409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-5a40ca03-f61c-4232-80dc-7a745a34bc67" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.977687] env[62525]: DEBUG nova.objects.instance [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'migration_context' on Instance uuid 5a40ca03-f61c-4232-80dc-7a745a34bc67 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1823.983294] env[62525]: DEBUG nova.compute.manager [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Received event network-vif-plugged-5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.983511] env[62525]: DEBUG oslo_concurrency.lockutils [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 
req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] Acquiring lock "42d14e44-44d6-46de-84e3-049a2d7e84f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.983753] env[62525]: DEBUG oslo_concurrency.lockutils [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.983878] env[62525]: DEBUG oslo_concurrency.lockutils [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.984101] env[62525]: DEBUG nova.compute.manager [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] No waiting events found dispatching network-vif-plugged-5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1823.984224] env[62525]: WARNING nova.compute.manager [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Received unexpected event network-vif-plugged-5e23aff4-06c8-4549-b425-5b83423352ce for instance with vm_state building and task_state spawning. [ 1823.984379] env[62525]: DEBUG nova.compute.manager [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Received event network-changed-5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.984561] env[62525]: DEBUG nova.compute.manager [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Refreshing instance network info cache due to event network-changed-5e23aff4-06c8-4549-b425-5b83423352ce. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1823.984728] env[62525]: DEBUG oslo_concurrency.lockutils [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] Acquiring lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.089175] env[62525]: DEBUG nova.network.neutron [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Successfully created port: 2e4896a2-e40f-4f74-8b88-e93af562023a {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1824.132493] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782117, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.190288] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.190445] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.190598] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1824.246098] env[62525]: DEBUG nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1824.252297] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c8c118d5-24d1-4383-8a69-d351385bf89a tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.252541] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "refresh_cache-b7768ee1-16f7-40f0-9f5f-28df4a1580f2" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.252733] env[62525]: DEBUG nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Instance network_info: |[{"id": "fa019fa8-6752-4b33-877c-63d55cadbf80", "address": "fa:16:3e:55:82:bd", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa019fa8-67", "ovs_interfaceid": "fa019fa8-6752-4b33-877c-63d55cadbf80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1824.253479] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.898s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.253479] env[62525]: DEBUG nova.objects.instance [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lazy-loading 'resources' on Instance uuid c14f3fb8-3090-4df3-9e78-57ee9d62921f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.254779] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:82:bd', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa019fa8-6752-4b33-877c-63d55cadbf80', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1824.263773] env[62525]: DEBUG oslo.service.loopingcall [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1824.264542] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1824.264774] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-785cb56c-4a37-40df-87fb-f27e5100a09b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.291114] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1824.291114] env[62525]: value = "task-1782118" [ 1824.291114] env[62525]: _type = "Task" [ 1824.291114] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.300665] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782118, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.480790] env[62525]: DEBUG nova.objects.base [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Object Instance<5a40ca03-f61c-4232-80dc-7a745a34bc67> lazy-loaded attributes: info_cache,migration_context {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1824.481930] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cdc0b7-ff11-4458-87d5-56dd13604425 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.487765] env[62525]: DEBUG nova.network.neutron [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1824.507534] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ad4375c-9ba3-424f-b658-934b83398a51 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.515745] env[62525]: DEBUG oslo_vmware.api [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1824.515745] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b5ad7c-00fc-9311-4d51-e110144fde4c" [ 1824.515745] env[62525]: _type = "Task" [ 1824.515745] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.526593] env[62525]: DEBUG oslo_vmware.api [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b5ad7c-00fc-9311-4d51-e110144fde4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.631336] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782117, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.665066] env[62525]: DEBUG nova.network.neutron [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updating instance_info_cache with network_info: [{"id": "5e23aff4-06c8-4549-b425-5b83423352ce", "address": "fa:16:3e:6e:b4:41", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e23aff4-06", "ovs_interfaceid": "5e23aff4-06c8-4549-b425-5b83423352ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.799303] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782118, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.956640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e29fe34-9abf-42ed-a71e-7388dbafe9f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.964945] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88d7c27-3d09-4870-94fe-a7ef2d5c6e43 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.995476] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfea8a9-4382-4ef6-bbea-57b09063e797 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.003100] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2139e5f-f145-4637-9f72-b8f0176654d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.016768] env[62525]: DEBUG nova.compute.provider_tree [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.026862] env[62525]: DEBUG oslo_vmware.api [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b5ad7c-00fc-9311-4d51-e110144fde4c, 'name': SearchDatastore_Task, 'duration_secs': 0.049736} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.027788] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.131757] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782117, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.697777} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.132141] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51/OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51.vmdk to [datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37/4bcdf024-5b6a-4101-8dee-23681d55ab37.vmdk. 
[ 1825.132222] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Cleaning up location [datastore1] OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1825.132421] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_14487011-78c3-4978-9d9b-8359a42dcd51 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1825.132702] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a2ef2d3-d7ea-4eeb-ad1b-2447f59bb22e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.139377] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1825.139377] env[62525]: value = "task-1782119" [ 1825.139377] env[62525]: _type = "Task" [ 1825.139377] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.146926] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782119, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.167933] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Releasing lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.168308] env[62525]: DEBUG nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Instance network_info: |[{"id": "5e23aff4-06c8-4549-b425-5b83423352ce", "address": "fa:16:3e:6e:b4:41", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e23aff4-06", "ovs_interfaceid": "5e23aff4-06c8-4549-b425-5b83423352ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1825.168571] env[62525]: DEBUG oslo_concurrency.lockutils [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] Acquired lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.168745] env[62525]: DEBUG nova.network.neutron [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Refreshing network info cache for port 5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1825.169936] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:b4:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e23aff4-06c8-4549-b425-5b83423352ce', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1825.177929] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 
tempest-SecurityGroupsTestJSON-1269799265-project-member] Creating folder: Project (0ea75b422b034b2b8bc55de69766ba75). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1825.181045] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d42e8c2-e293-45c7-ba92-4a1dfe5d4082 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.192673] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Created folder: Project (0ea75b422b034b2b8bc55de69766ba75) in parent group-v369553. [ 1825.192877] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Creating folder: Instances. Parent ref: group-v369849. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1825.193128] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bba38eb-1a94-420b-a01f-19bebaecd28c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.203064] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Created folder: Instances in parent group-v369849. [ 1825.203173] env[62525]: DEBUG oslo.service.loopingcall [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.203329] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1825.203529] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-967b1b45-19ca-4086-a6c6-7482a34511f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.228218] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1825.228218] env[62525]: value = "task-1782122" [ 1825.228218] env[62525]: _type = "Task" [ 1825.228218] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.236424] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782122, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.256113] env[62525]: DEBUG nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1825.280988] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1825.281198] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1825.281362] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1825.281551] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1825.281725] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1825.281851] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1825.282053] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1825.282224] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1825.282597] env[62525]: DEBUG nova.virt.hardware [None 
req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1825.282774] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1825.282945] env[62525]: DEBUG nova.virt.hardware [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1825.283810] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f909fb23-ef51-4ae2-abd9-d937cd465d8c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.298189] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2126664-df30-48f7-9195-137041a2aa89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.305085] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782118, 'name': CreateVM_Task, 'duration_secs': 0.658053} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.305571] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1825.306259] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.306430] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.306754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1825.307011] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ca6327b-dd21-4b29-b4de-f714fe21840f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.323029] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 
tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1825.323029] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526c0aa7-ea5a-8afc-6afc-d97a59e8bd26" [ 1825.323029] env[62525]: _type = "Task" [ 1825.323029] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.329945] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526c0aa7-ea5a-8afc-6afc-d97a59e8bd26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.522932] env[62525]: DEBUG nova.scheduler.client.report [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1825.579183] env[62525]: DEBUG nova.network.neutron [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updated VIF entry in instance network info cache for port 5e23aff4-06c8-4549-b425-5b83423352ce. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1825.579556] env[62525]: DEBUG nova.network.neutron [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updating instance_info_cache with network_info: [{"id": "5e23aff4-06c8-4549-b425-5b83423352ce", "address": "fa:16:3e:6e:b4:41", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e23aff4-06", "ovs_interfaceid": "5e23aff4-06c8-4549-b425-5b83423352ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.617012] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [{"id": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "address": "fa:16:3e:43:49:a1", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d67f7f-d8", "ovs_interfaceid": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.651791] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190633} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.652104] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1825.652322] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37/4bcdf024-5b6a-4101-8dee-23681d55ab37.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.652532] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37/4bcdf024-5b6a-4101-8dee-23681d55ab37.vmdk to [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1825.652796] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f645834-e8c8-4cea-a53a-342b50a461bb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.659647] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1825.659647] env[62525]: value = "task-1782123" [ 1825.659647] env[62525]: _type = "Task" [ 1825.659647] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.667770] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.738659] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782122, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.769296] env[62525]: DEBUG nova.network.neutron [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Successfully updated port: 2e4896a2-e40f-4f74-8b88-e93af562023a {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1825.833244] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526c0aa7-ea5a-8afc-6afc-d97a59e8bd26, 'name': SearchDatastore_Task, 'duration_secs': 0.016263} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.837184] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.837184] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1825.837184] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.837184] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.837184] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1825.837184] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-868b82f4-f9a0-4889-a076-c211404ccf9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.842500] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1825.842699] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1825.843470] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cbb6e21-5a04-466e-9804-008069be31e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.849299] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1825.849299] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e21fea-96ae-c82b-f6a8-0e0dadc20311" [ 1825.849299] env[62525]: _type = "Task" [ 1825.849299] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.857007] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e21fea-96ae-c82b-f6a8-0e0dadc20311, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.016701] env[62525]: DEBUG nova.compute.manager [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Received event network-vif-plugged-2e4896a2-e40f-4f74-8b88-e93af562023a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1826.016933] env[62525]: DEBUG oslo_concurrency.lockutils [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] Acquiring lock "154ac489-69e4-41a8-90cf-b3d6196c4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.017178] env[62525]: DEBUG oslo_concurrency.lockutils [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.017353] env[62525]: DEBUG oslo_concurrency.lockutils [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.017522] env[62525]: DEBUG nova.compute.manager [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] No waiting events found dispatching network-vif-plugged-2e4896a2-e40f-4f74-8b88-e93af562023a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1826.017682] env[62525]: WARNING nova.compute.manager [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Received unexpected event network-vif-plugged-2e4896a2-e40f-4f74-8b88-e93af562023a for 
instance with vm_state building and task_state spawning. [ 1826.017842] env[62525]: DEBUG nova.compute.manager [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Received event network-changed-2e4896a2-e40f-4f74-8b88-e93af562023a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1826.018009] env[62525]: DEBUG nova.compute.manager [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Refreshing instance network info cache due to event network-changed-2e4896a2-e40f-4f74-8b88-e93af562023a. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1826.018270] env[62525]: DEBUG oslo_concurrency.lockutils [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] Acquiring lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.018416] env[62525]: DEBUG oslo_concurrency.lockutils [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] Acquired lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.018575] env[62525]: DEBUG nova.network.neutron [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Refreshing network info cache for port 2e4896a2-e40f-4f74-8b88-e93af562023a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1826.028202] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.775s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.030771] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.671s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.031027] env[62525]: DEBUG nova.objects.instance [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lazy-loading 'resources' on Instance uuid 467c6af1-2961-4213-8f0c-fe7591d93b5d {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1826.061690] env[62525]: INFO nova.scheduler.client.report [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleted allocations for instance c14f3fb8-3090-4df3-9e78-57ee9d62921f [ 1826.082121] env[62525]: DEBUG oslo_concurrency.lockutils [req-61a2c8f2-f9a8-4e28-9353-5600e0d8e870 req-874d6e8f-12bd-4a88-a7f6-63351fb7e017 service nova] Releasing lock 
"refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.120055] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.120055] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1826.120055] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.120055] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.120388] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.120388] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.120580] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.120731] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.120992] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1826.121321] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.171950] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782123, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.239895] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782122, 'name': CreateVM_Task, 'duration_secs': 0.88759} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.240095] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1826.240812] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.240988] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.241368] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.241646] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ee9fc33-254b-4fbd-8e75-3a9a35518b83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.247786] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1826.247786] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cea0a1-b72d-e3a8-3a64-f6fb3b6c047a" [ 1826.247786] env[62525]: _type = "Task" [ 1826.247786] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.256567] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cea0a1-b72d-e3a8-3a64-f6fb3b6c047a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.273082] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.361306] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e21fea-96ae-c82b-f6a8-0e0dadc20311, 'name': SearchDatastore_Task, 'duration_secs': 0.011632} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.362186] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0026fa6-80b1-49d3-b64e-dbe4c29c5e5a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.369415] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1826.369415] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52680a02-b7c8-1019-d63e-73f20c0d7cb9" [ 1826.369415] env[62525]: _type = "Task" [ 1826.369415] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.380094] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52680a02-b7c8-1019-d63e-73f20c0d7cb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.563296] env[62525]: DEBUG nova.network.neutron [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1826.572624] env[62525]: DEBUG oslo_concurrency.lockutils [None req-15175bb3-8286-4262-b424-07712e3410d8 tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "c14f3fb8-3090-4df3-9e78-57ee9d62921f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.533s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.624563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.675259] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782123, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.675714] env[62525]: DEBUG nova.network.neutron [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.765026] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cea0a1-b72d-e3a8-3a64-f6fb3b6c047a, 'name': SearchDatastore_Task, 'duration_secs': 0.101856} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.765340] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.765606] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.765838] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.785121] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba2f64f-54d8-4f0c-bde3-7773aa168098 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.794818] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7fdd29-1bc3-49db-970c-30635735232a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.832158] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ff15bb-0cf1-4f3f-968e-2ccd69c82c5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.841649] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75425f2-d942-4435-9a40-1d61c43ab385 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.859369] env[62525]: DEBUG nova.compute.provider_tree [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1826.883116] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52680a02-b7c8-1019-d63e-73f20c0d7cb9, 'name': SearchDatastore_Task, 'duration_secs': 0.115395} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.883523] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.883874] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] b7768ee1-16f7-40f0-9f5f-28df4a1580f2/b7768ee1-16f7-40f0-9f5f-28df4a1580f2.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1826.884169] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.884447] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.884676] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0082b1c6-2233-4fdc-b6ca-2e863bea2f9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.887340] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2452e653-362a-479f-9a2f-d46a411b2982 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.895052] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1826.895052] env[62525]: value = "task-1782124" [ 1826.895052] env[62525]: _type = "Task" [ 1826.895052] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.904753] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782124, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.908180] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.908888] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1826.909728] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adea1b7d-ac48-4084-947c-f28a09563259 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.916100] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1826.916100] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526d8502-cf19-211c-ea21-99f35723b884" [ 1826.916100] env[62525]: _type = "Task" [ 1826.916100] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.925977] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526d8502-cf19-211c-ea21-99f35723b884, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.173037] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782123, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.178817] env[62525]: DEBUG oslo_concurrency.lockutils [req-7f0933c9-3108-4164-b4e1-b09a8fae64d1 req-50816abd-eeb3-4733-95ad-d844485748a3 service nova] Releasing lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.179493] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.179575] env[62525]: DEBUG nova.network.neutron [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1827.363149] env[62525]: DEBUG nova.scheduler.client.report [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1827.411554] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.433261] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526d8502-cf19-211c-ea21-99f35723b884, 'name': SearchDatastore_Task, 'duration_secs': 0.089724} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.434709] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbd4505-816d-4d27-ae39-72ca29d75fdb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.443114] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1827.443114] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52323f5d-7572-cbf3-e445-9e79e4b9da6a" [ 1827.443114] env[62525]: _type = "Task" [ 1827.443114] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.456394] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52323f5d-7572-cbf3-e445-9e79e4b9da6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.672831] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782123, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.716930] env[62525]: DEBUG nova.network.neutron [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1827.866645] env[62525]: DEBUG nova.network.neutron [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Updating instance_info_cache with network_info: [{"id": "2e4896a2-e40f-4f74-8b88-e93af562023a", "address": "fa:16:3e:1f:ca:bc", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e4896a2-e4", "ovs_interfaceid": "2e4896a2-e40f-4f74-8b88-e93af562023a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.870969] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.874344] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.846s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.905957] env[62525]: INFO nova.scheduler.client.report [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Deleted allocations for instance 467c6af1-2961-4213-8f0c-fe7591d93b5d [ 1827.911616] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.956019] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52323f5d-7572-cbf3-e445-9e79e4b9da6a, 'name': SearchDatastore_Task, 'duration_secs': 0.097537} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.956375] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.956609] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 42d14e44-44d6-46de-84e3-049a2d7e84f3/42d14e44-44d6-46de-84e3-049a2d7e84f3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1827.957199] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0344b941-b5ad-48bd-b327-1c04cacb0a89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.967088] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1827.967088] env[62525]: value = "task-1782125" [ 1827.967088] env[62525]: _type = "Task" [ 1827.967088] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.978747] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782125, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.172623] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782123, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.376837] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.376837] env[62525]: DEBUG nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Instance network_info: |[{"id": "2e4896a2-e40f-4f74-8b88-e93af562023a", "address": "fa:16:3e:1f:ca:bc", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e4896a2-e4", "ovs_interfaceid": "2e4896a2-e40f-4f74-8b88-e93af562023a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1828.376837] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:ca:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10ff2092-e8eb-4768-ad4a-65a80560b447', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e4896a2-e40f-4f74-8b88-e93af562023a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1828.384793] env[62525]: DEBUG oslo.service.loopingcall [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1828.387995] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1828.388521] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab7f472f-07ed-45ca-b309-917527b6c6f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.423131] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782124, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.423616] env[62525]: DEBUG oslo_concurrency.lockutils [None req-660c75b2-eeb2-46f8-8210-86db76c8e90d tempest-ListServersNegativeTestJSON-1840569589 tempest-ListServersNegativeTestJSON-1840569589-project-member] Lock "467c6af1-2961-4213-8f0c-fe7591d93b5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.312s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.428386] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1828.428386] env[62525]: value = "task-1782126" [ 1828.428386] env[62525]: _type = "Task" [ 1828.428386] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.439111] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782126, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.478930] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782125, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.605394] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83f245c-6c0b-41ae-9335-a08c48faece0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.614477] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6504ce-be51-4184-b201-977b884d7b0b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.649689] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d154b6-bb34-4ac2-bc1b-2b5f914ceb36 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.659288] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb39e5f-58d3-4672-8a07-3bc30f081c2f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.678387] env[62525]: DEBUG nova.compute.provider_tree [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.685022] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782123, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.625779} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.685624] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4bcdf024-5b6a-4101-8dee-23681d55ab37/4bcdf024-5b6a-4101-8dee-23681d55ab37.vmdk to [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1828.686489] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619aae06-15b5-48b6-be64-d1a5fd6dc6dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.712020] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1828.712844] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b589e435-8af0-4756-80db-348000661fbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.734221] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1828.734221] env[62525]: value = "task-1782127" [ 1828.734221] env[62525]: _type = "Task" [ 1828.734221] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.746766] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782127, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.920995] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782124, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.931063} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.921694] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] b7768ee1-16f7-40f0-9f5f-28df4a1580f2/b7768ee1-16f7-40f0-9f5f-28df4a1580f2.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1828.921934] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1828.922276] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7db2cd42-72d5-41bb-ae0e-4f52ebda911f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.930224] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1828.930224] env[62525]: value = "task-1782128" [ 1828.930224] env[62525]: _type = "Task" [ 1828.930224] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.951320] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782128, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.951650] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782126, 'name': CreateVM_Task, 'duration_secs': 0.460988} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.951891] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1828.952653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.952906] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.953315] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1828.953683] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-326111db-39df-4bfc-8996-909d688e8f2c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.960710] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1828.960710] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f41bb1-56d8-0847-c76b-970ca0614260" [ 1828.960710] env[62525]: _type = "Task" [ 1828.960710] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.971264] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f41bb1-56d8-0847-c76b-970ca0614260, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.980867] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782125, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.186975] env[62525]: DEBUG nova.scheduler.client.report [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1829.245923] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782127, 'name': ReconfigVM_Task, 'duration_secs': 0.418411} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.246245] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfigured VM instance instance-0000004a to attach disk [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41/cb043ab8-dff7-48c6-b50b-a4d77a01eb41.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1829.247375] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'disk_bus': None, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'encrypted': False, 'guest_format': None, 'encryption_format': None, 'encryption_options': None, 'boot_index': 0, 'device_type': 'disk', 'image_id': 'a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369843', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'name': 'volume-aa014b16-de19-45f8-9702-f93bf9cafd8f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'cb043ab8-dff7-48c6-b50b-a4d77a01eb41', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'serial': 'aa014b16-de19-45f8-9702-f93bf9cafd8f'}, 'device_type': None, 'mount_device': '/dev/sdb', 'attachment_id': '5085591e-5db5-4d50-9c50-8eac523076d1', 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=62525) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1829.247582] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] 
Volume attach. Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1829.248299] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369843', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'name': 'volume-aa014b16-de19-45f8-9702-f93bf9cafd8f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'cb043ab8-dff7-48c6-b50b-a4d77a01eb41', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'serial': 'aa014b16-de19-45f8-9702-f93bf9cafd8f'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1829.248562] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a401fc-4d2b-44a7-b755-f3df535f3a91 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.264630] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a9363d-a8b2-4ad6-b705-71f2d6236406 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.289678] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] volume-aa014b16-de19-45f8-9702-f93bf9cafd8f/volume-aa014b16-de19-45f8-9702-f93bf9cafd8f.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1829.289970] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36fac313-90e3-4af1-9970-df0b956b0860 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.308631] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1829.308631] env[62525]: value = "task-1782129" [ 1829.308631] env[62525]: _type = "Task" [ 1829.308631] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.316842] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782129, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.445222] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087732} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.445517] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1829.446365] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784c2cdc-8965-46ca-9de2-747994269745 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.469365] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] b7768ee1-16f7-40f0-9f5f-28df4a1580f2/b7768ee1-16f7-40f0-9f5f-28df4a1580f2.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1829.469716] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-440ab4f9-d24f-4503-b66f-b0c932a79044 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.495154] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782125, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.360763} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.499653] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 42d14e44-44d6-46de-84e3-049a2d7e84f3/42d14e44-44d6-46de-84e3-049a2d7e84f3.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1829.499880] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1829.500229] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f41bb1-56d8-0847-c76b-970ca0614260, 'name': SearchDatastore_Task, 'duration_secs': 0.049347} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.500495] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1829.500495] env[62525]: value = "task-1782130" [ 1829.500495] env[62525]: _type = "Task" [ 1829.500495] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.500684] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eed76143-a1be-4860-b2e8-b82f69327f11 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.503164] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.503392] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1829.503621] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.503920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.504152] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1829.504435] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25ad1267-4444-402c-9ac5-708543d3057e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.518415] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782130, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.518415] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1829.518415] env[62525]: value = "task-1782131" [ 1829.518415] env[62525]: _type = "Task" [ 1829.518415] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.524021] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1829.524021] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1829.524301] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40730778-4145-4caa-9fa8-a3581f685387 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.530489] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782131, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.533516] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1829.533516] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520ad1ca-0e90-06ad-9920-a56bdad7cb86" [ 1829.533516] env[62525]: _type = "Task" [ 1829.533516] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.542203] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520ad1ca-0e90-06ad-9920-a56bdad7cb86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.823227] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782129, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.014781] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782130, 'name': ReconfigVM_Task, 'duration_secs': 0.333324} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.014912] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Reconfigured VM instance instance-0000006c to attach disk [datastore1] b7768ee1-16f7-40f0-9f5f-28df4a1580f2/b7768ee1-16f7-40f0-9f5f-28df4a1580f2.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1830.015581] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83d94bd1-df09-4548-a0f4-9560c4fde0b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.022722] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1830.022722] env[62525]: value = "task-1782132" [ 1830.022722] env[62525]: _type = "Task" [ 1830.022722] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.029455] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782131, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.034438] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782132, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.042468] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520ad1ca-0e90-06ad-9920-a56bdad7cb86, 'name': SearchDatastore_Task, 'duration_secs': 0.029966} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.043504] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-934eb6ee-31c9-446f-a158-4e052af96518 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.049172] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1830.049172] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529e7b31-e372-514b-6409-f5c138d09297" [ 1830.049172] env[62525]: _type = "Task" [ 1830.049172] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.057619] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529e7b31-e372-514b-6409-f5c138d09297, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.197102] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.323s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.200057] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.576s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.200721] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.200912] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1830.202100] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511b4ef6-e9a3-488c-9538-6bc0b511cb5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.210446] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5435c76-e29b-4e66-ab6a-d682c1e35021 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.228194] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87555c6f-d001-4408-a923-75a5e6d9e5c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.235032] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e42681d-25d7-498e-8b29-2715e57e7b81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.267761] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180161MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1830.267932] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.268153] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.324747] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782129, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.530288] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782131, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.667428} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.530868] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1830.531640] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b26a992-df3a-4af3-9e87-0a812badb941 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.537552] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782132, 'name': Rename_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.557535] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 42d14e44-44d6-46de-84e3-049a2d7e84f3/42d14e44-44d6-46de-84e3-049a2d7e84f3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1830.557883] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0848e6c-fc7e-437b-bb41-d80495810fa6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.580666] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529e7b31-e372-514b-6409-f5c138d09297, 'name': SearchDatastore_Task, 'duration_secs': 0.011604} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.581836] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.582110] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/154ac489-69e4-41a8-90cf-b3d6196c4822.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1830.582429] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1830.582429] env[62525]: value = "task-1782133" [ 1830.582429] env[62525]: _type = "Task" [ 1830.582429] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.582613] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb77a913-9e31-45c9-8225-7ef8217e4bf7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.592023] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782133, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.593177] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1830.593177] env[62525]: value = "task-1782134" [ 1830.593177] env[62525]: _type = "Task" [ 1830.593177] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.600522] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782134, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.759292] env[62525]: INFO nova.scheduler.client.report [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted allocation for migration 3b8de5bf-1290-42c3-8fa6-f9777a7f3941 [ 1830.822871] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782129, 'name': ReconfigVM_Task, 'duration_secs': 1.11984} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.823182] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfigured VM instance instance-0000004a to attach disk [datastore1] volume-aa014b16-de19-45f8-9702-f93bf9cafd8f/volume-aa014b16-de19-45f8-9702-f93bf9cafd8f.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1830.828044] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13cbe89d-b20a-40d1-901c-ac225beebc12 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.846021] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1830.846021] env[62525]: value = "task-1782135" [ 1830.846021] env[62525]: _type = "Task" [ 1830.846021] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.854140] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782135, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.034440] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782132, 'name': Rename_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.094544] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782133, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.107142] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782134, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454888} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.107142] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/154ac489-69e4-41a8-90cf-b3d6196c4822.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1831.107142] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1831.107532] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-244a1bd2-6c55-4900-8590-eaddad0308ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.113792] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1831.113792] env[62525]: value = "task-1782136" [ 1831.113792] env[62525]: _type = "Task" [ 1831.113792] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.122153] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782136, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.267198] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6f57f8b4-8aa8-4e82-b703-9315ab0a9102 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.686s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 130a3015-6caf-4374-a35f-9dd49bb8b3bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 50ee564d-7b27-4bc4-a95e-7717de865cfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 160a67ea-5044-4597-9a61-82e05b8aa778 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 69a1093a-95d7-4cbb-90bf-1a213470872a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 61fa8887-db88-4adc-8c3f-ffc78e0e550d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance cb043ab8-dff7-48c6-b50b-a4d77a01eb41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 5a40ca03-f61c-4232-80dc-7a745a34bc67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.303866] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance b7768ee1-16f7-40f0-9f5f-28df4a1580f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.304309] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 42d14e44-44d6-46de-84e3-049a2d7e84f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.304309] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 154ac489-69e4-41a8-90cf-b3d6196c4822 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1831.304399] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1831.304452] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1831.355449] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782135, 'name': ReconfigVM_Task, 'duration_secs': 0.237445} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.358166] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369843', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'name': 'volume-aa014b16-de19-45f8-9702-f93bf9cafd8f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'cb043ab8-dff7-48c6-b50b-a4d77a01eb41', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'serial': 'aa014b16-de19-45f8-9702-f93bf9cafd8f'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1831.359197] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70661eba-0214-4ef3-aa86-c0c55b2d2c9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.366159] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1831.366159] env[62525]: value = "task-1782137" [ 1831.366159] env[62525]: _type = "Task" [ 1831.366159] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.375748] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782137, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.482164] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ef76e5-d405-49ce-bf53-3403407c2573 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.493575] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567cd30e-dab1-4048-a09e-4c60a753bbb2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.552764] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2b99f4-61a4-4208-b492-db8abf106087 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.568235] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782132, 'name': Rename_Task, 'duration_secs': 1.140474} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.570469] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65150f4-b9db-4c2c-a39c-72f34e233aaa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.577381] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1831.577711] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39804fb8-14a2-4016-a515-74ceb95a9bde {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.598620] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1831.607330] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1831.607330] env[62525]: value = "task-1782138" [ 1831.607330] env[62525]: _type = "Task" [ 1831.607330] env[62525]: } to 
complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.621208] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782133, 'name': ReconfigVM_Task, 'duration_secs': 0.522792} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.626612] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 42d14e44-44d6-46de-84e3-049a2d7e84f3/42d14e44-44d6-46de-84e3-049a2d7e84f3.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1831.632616] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbd1eff3-a118-4e73-946c-1f606159007b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.634856] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782138, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.647026] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782136, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103937} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.647026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1831.647026] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1831.647026] env[62525]: value = "task-1782139" [ 1831.647026] env[62525]: _type = "Task" [ 1831.647026] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.647026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf72b080-f538-49dc-a95c-992dde7b6d3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.685724] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/154ac489-69e4-41a8-90cf-b3d6196c4822.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1831.693826] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-838238d9-0ff6-4c10-abad-57c5403272e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.718103] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782139, 'name': Rename_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.724031] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1831.724031] env[62525]: value = "task-1782140" [ 1831.724031] env[62525]: _type = "Task" [ 1831.724031] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.734466] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782140, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.876139] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782137, 'name': Rename_Task, 'duration_secs': 0.292555} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.876429] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1831.876719] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77ccc263-bca8-4336-a3f8-17f502392ae1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.883800] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1831.883800] env[62525]: value = "task-1782141" [ 1831.883800] env[62525]: _type = "Task" [ 1831.883800] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.892618] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782141, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.120460] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782138, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.127038] env[62525]: ERROR nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [req-68a50838-7824-45d0-a2ca-f30d9854327c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-68a50838-7824-45d0-a2ca-f30d9854327c"}]} [ 1832.143909] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1832.157618] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1832.157807] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1832.162865] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782139, 'name': Rename_Task, 'duration_secs': 0.187346} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.163132] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1832.163376] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5950b785-da09-422c-ab8c-0ea1e9860d1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.169526] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1832.169526] env[62525]: value = "task-1782142" [ 1832.169526] env[62525]: _type = "Task" [ 1832.169526] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.172954] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1832.179634] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782142, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.192699] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1832.237038] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782140, 'name': ReconfigVM_Task, 'duration_secs': 0.293287} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.237406] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/154ac489-69e4-41a8-90cf-b3d6196c4822.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1832.238054] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3c19921-8c2c-402f-bda7-985112c2ca05 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.244998] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1832.244998] env[62525]: value = "task-1782143" [ 1832.244998] env[62525]: _type = "Task" [ 1832.244998] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.254119] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782143, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.371660] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89843ef5-7edd-4a61-952c-463fcf09f950 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.379790] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc11e51-9906-4cd0-bc4c-5613f9edf060 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.414555] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477cab0d-9758-4abe-9f82-af2240315eb6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.420466] env[62525]: DEBUG oslo_vmware.api [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782141, 'name': PowerOnVM_Task, 'duration_secs': 0.466522} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.421172] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1832.427310] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8c523e-b680-49cc-a06b-0b55e96574da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.441560] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1832.534332] env[62525]: DEBUG nova.compute.manager [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1832.535571] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b115838e-79ef-4100-b4bf-cf23c46851a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.548605] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.548825] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.622383] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782138, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.679066] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782142, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.757013] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782143, 'name': Rename_Task, 'duration_secs': 0.199228} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.757295] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1832.757542] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-611b8730-c1b6-4044-98bb-2dd573202a2f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.763988] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1832.763988] env[62525]: value = "task-1782144" [ 1832.763988] env[62525]: _type = "Task" [ 1832.763988] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.771668] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782144, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.976676] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 147 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1832.977017] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 147 to 148 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1832.977191] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1833.053482] env[62525]: DEBUG nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1833.056581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.056967] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.057129] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.057377] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.057536] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.061784] env[62525]: INFO nova.compute.manager [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Terminating instance [ 1833.063099] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cd5e43df-e670-4205-bf0e-09fc70c8e192 tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 29.346s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.064394] env[62525]: DEBUG nova.compute.manager [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1833.064588] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1833.068021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2968931-17af-42e6-97a9-860cb86f78a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.074649] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1833.075320] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92f64ddb-176d-4e4a-9209-238a062d768e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.079032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "160a67ea-5044-4597-9a61-82e05b8aa778" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.079241] env[62525]: DEBUG oslo_concurrency.lockutils [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.086168] env[62525]: DEBUG oslo_vmware.api [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1833.086168] env[62525]: value = "task-1782145" [ 1833.086168] env[62525]: _type = "Task" [ 1833.086168] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.095313] env[62525]: DEBUG oslo_vmware.api [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.121125] env[62525]: DEBUG oslo_vmware.api [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782138, 'name': PowerOnVM_Task, 'duration_secs': 1.267373} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.121453] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1833.121719] env[62525]: INFO nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Took 12.54 seconds to spawn the instance on the hypervisor. [ 1833.121976] env[62525]: DEBUG nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1833.122818] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7ec3a9-d0b8-403a-aa12-b236979b3fde {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.181197] env[62525]: DEBUG oslo_vmware.api [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782142, 'name': PowerOnVM_Task, 'duration_secs': 0.889374} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.181603] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1833.181869] env[62525]: INFO nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Took 10.27 seconds to spawn the instance on the hypervisor. [ 1833.182115] env[62525]: DEBUG nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1833.183086] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54470cc-004c-47a7-8bb8-56ac9758a67f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.274491] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782144, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.479593] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.479837] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.485065] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1833.485267] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.217s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.581806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.581806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.583452] env[62525]: INFO nova.compute.claims [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1833.586480] env[62525]: DEBUG nova.compute.utils [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1833.598298] env[62525]: DEBUG oslo_vmware.api [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782145, 'name': PowerOffVM_Task, 'duration_secs': 0.27477} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.599081] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1833.599258] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1833.599502] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79818d35-f3cf-4d80-99f6-536e013e72dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.640953] env[62525]: INFO nova.compute.manager [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Took 18.38 seconds to build instance. [ 1833.699694] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1833.699910] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1833.700109] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleting the datastore file [datastore1] 5a40ca03-f61c-4232-80dc-7a745a34bc67 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1833.700413] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e301ea1e-6906-4075-8f66-ef4e7be30342 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.702486] env[62525]: INFO nova.compute.manager [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Took 18.40 seconds to build instance. [ 1833.708148] env[62525]: DEBUG oslo_vmware.api [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1833.708148] env[62525]: value = "task-1782147" [ 1833.708148] env[62525]: _type = "Task" [ 1833.708148] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.716302] env[62525]: DEBUG oslo_vmware.api [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.775037] env[62525]: DEBUG oslo_vmware.api [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782144, 'name': PowerOnVM_Task, 'duration_secs': 0.809989} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.775316] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1833.775519] env[62525]: INFO nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Took 8.52 seconds to spawn the instance on the hypervisor. [ 1833.775696] env[62525]: DEBUG nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1833.776479] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400f5d93-8f52-46d5-ab54-39526c19d518 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.982649] env[62525]: DEBUG nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1834.093792] env[62525]: DEBUG oslo_concurrency.lockutils [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.143056] env[62525]: DEBUG oslo_concurrency.lockutils [None req-029ed3d3-4dd0-4994-85a3-1c961cebee84 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.892s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.205232] env[62525]: DEBUG oslo_concurrency.lockutils [None req-56b34da2-a346-4a27-8ad0-13ea4ac68a27 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.905s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.218784] env[62525]: DEBUG oslo_vmware.api [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.293886] env[62525]: INFO nova.compute.manager [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Took 13.88 seconds to build instance. 
[ 1834.504917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.582985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.582985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.582985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.582985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.582985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.584386] env[62525]: INFO nova.compute.manager [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Terminating instance [ 1834.586230] env[62525]: DEBUG nova.compute.manager [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1834.586422] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1834.587278] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b880bd-b8b7-407c-9040-ba1e098e65f8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.598503] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1834.598809] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe8845c7-322f-4a77-9e14-12c5f97ac4c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.605050] env[62525]: DEBUG oslo_vmware.api [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1834.605050] env[62525]: value = "task-1782148" [ 1834.605050] env[62525]: _type = "Task" [ 1834.605050] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.615829] env[62525]: DEBUG oslo_vmware.api [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782148, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.687987] env[62525]: DEBUG nova.compute.manager [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Received event network-changed-5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1834.687987] env[62525]: DEBUG nova.compute.manager [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Refreshing instance network info cache due to event network-changed-5e23aff4-06c8-4549-b425-5b83423352ce. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1834.687987] env[62525]: DEBUG oslo_concurrency.lockutils [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] Acquiring lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.687987] env[62525]: DEBUG oslo_concurrency.lockutils [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] Acquired lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.687987] env[62525]: DEBUG nova.network.neutron [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Refreshing network info cache for port 5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1834.718000] env[62525]: DEBUG oslo_vmware.api [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.60955} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.720851] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1834.720851] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1834.720851] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1834.720999] env[62525]: INFO nova.compute.manager [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1834.721233] env[62525]: DEBUG oslo.service.loopingcall [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1834.721628] env[62525]: DEBUG nova.compute.manager [-] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1834.721727] env[62525]: DEBUG nova.network.neutron [-] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1834.795656] env[62525]: DEBUG oslo_concurrency.lockutils [None req-86ea902f-975b-4f2e-a398-1ae3133cef01 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.389s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.807595] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13c10c8-61e5-4f86-8322-dd6a2bb7de27 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.815443] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4c176c-d857-44d4-a18b-7e895344b249 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.847012] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55c4409-5153-469b-bd38-73bd8d00c00f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.855031] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bc5bc0-8387-4e9d-8146-c9cc0bdd5194 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.872863] env[62525]: DEBUG nova.compute.provider_tree [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.024208] env[62525]: DEBUG nova.compute.manager [req-6da3236e-5bc3-4080-bffe-17509d445bce req-cf7334f3-7d37-4fc4-8730-773206caf29a service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Received event network-vif-deleted-6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1835.024533] env[62525]: INFO nova.compute.manager [req-6da3236e-5bc3-4080-bffe-17509d445bce req-cf7334f3-7d37-4fc4-8730-773206caf29a service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Neutron deleted interface 6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e; detaching it from the instance and deleting it from the info cache [ 1835.024703] env[62525]: DEBUG nova.network.neutron [req-6da3236e-5bc3-4080-bffe-17509d445bce req-cf7334f3-7d37-4fc4-8730-773206caf29a service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.115966] env[62525]: 
DEBUG oslo_vmware.api [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782148, 'name': PowerOffVM_Task, 'duration_secs': 0.247784} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.116342] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1835.116415] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1835.116862] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0be74422-956a-45ac-8980-bde5e858b12e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.171192] env[62525]: DEBUG oslo_concurrency.lockutils [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "160a67ea-5044-4597-9a61-82e05b8aa778" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.171545] env[62525]: DEBUG oslo_concurrency.lockutils [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.171828] env[62525]: INFO nova.compute.manager [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Attaching volume a734d476-cf33-4b2d-b480-37fa8aef837b to /dev/sdb [ 1835.211601] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf53b7c3-4af7-45b5-9193-84e4c231009f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.219704] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb268dc-86f7-40dc-8de6-91fca70c507b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.236817] env[62525]: DEBUG nova.virt.block_device [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Updating existing volume attachment record: f7d1b833-1f53-4ec8-8b40-13c7e960fea3 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1835.299898] env[62525]: DEBUG 
nova.virt.vmwareapi.vmops [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1835.300084] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1835.301012] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleting the datastore file [datastore1] b7768ee1-16f7-40f0-9f5f-28df4a1580f2 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.301012] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b49ba4f-2d39-4bc9-a38c-d88518ba399c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.310025] env[62525]: DEBUG oslo_vmware.api [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1835.310025] env[62525]: value = "task-1782150" [ 1835.310025] env[62525]: _type = "Task" [ 1835.310025] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.318275] env[62525]: DEBUG oslo_vmware.api [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782150, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.365110] env[62525]: INFO nova.compute.manager [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Rescuing [ 1835.365711] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.365711] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.365894] env[62525]: DEBUG nova.network.neutron [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1835.376416] env[62525]: DEBUG nova.scheduler.client.report [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1835.470039] env[62525]: DEBUG nova.network.neutron [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updated VIF entry in instance network info cache for port 5e23aff4-06c8-4549-b425-5b83423352ce. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1835.470039] env[62525]: DEBUG nova.network.neutron [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updating instance_info_cache with network_info: [{"id": "5e23aff4-06c8-4549-b425-5b83423352ce", "address": "fa:16:3e:6e:b4:41", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e23aff4-06", "ovs_interfaceid": "5e23aff4-06c8-4549-b425-5b83423352ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.480691] env[62525]: DEBUG nova.network.neutron [-] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.528801] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-761f2c32-928c-4e1a-83f1-ca0f41d6be31 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.537898] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69b45af-05bb-4c13-9c87-ec7714590a1e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.571150] env[62525]: DEBUG nova.compute.manager [req-6da3236e-5bc3-4080-bffe-17509d445bce req-cf7334f3-7d37-4fc4-8730-773206caf29a service nova] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Detach interface failed, port_id=6c0d4e5d-c0af-49d7-8b58-d85a4e2a815e, reason: Instance 5a40ca03-f61c-4232-80dc-7a745a34bc67 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1835.821327] env[62525]: DEBUG oslo_vmware.api [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183193} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.821327] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1835.821671] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1835.821730] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1835.822335] env[62525]: INFO nova.compute.manager [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1835.822335] env[62525]: DEBUG oslo.service.loopingcall [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.822464] env[62525]: DEBUG nova.compute.manager [-] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1835.822554] env[62525]: DEBUG nova.network.neutron [-] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1835.881657] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.881657] env[62525]: DEBUG nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1835.886433] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.379s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.886433] env[62525]: INFO nova.compute.claims [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1835.973148] env[62525]: DEBUG oslo_concurrency.lockutils [req-b36aee6e-3223-4bdb-b7e1-335e6dfc1c28 req-f2bdc040-ff45-4b39-93bc-4737966615b1 service nova] Releasing lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.982793] env[62525]: INFO nova.compute.manager [-] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Took 1.26 seconds to deallocate network for instance. [ 1836.187374] env[62525]: DEBUG nova.network.neutron [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Updating instance_info_cache with network_info: [{"id": "2e4896a2-e40f-4f74-8b88-e93af562023a", "address": "fa:16:3e:1f:ca:bc", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e4896a2-e4", "ovs_interfaceid": "2e4896a2-e40f-4f74-8b88-e93af562023a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.390533] env[62525]: DEBUG nova.compute.utils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1836.394223] env[62525]: DEBUG nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Not allocating networking since 'none' was specified. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1836.492322] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.690590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.714303] env[62525]: DEBUG nova.compute.manager [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Received event network-changed-5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1836.714507] env[62525]: DEBUG nova.compute.manager [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Refreshing instance network info cache due to event network-changed-5e23aff4-06c8-4549-b425-5b83423352ce. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1836.714691] env[62525]: DEBUG oslo_concurrency.lockutils [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] Acquiring lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.714840] env[62525]: DEBUG oslo_concurrency.lockutils [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] Acquired lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.714999] env[62525]: DEBUG nova.network.neutron [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Refreshing network info cache for port 5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1836.745013] env[62525]: DEBUG nova.network.neutron [-] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.895615] env[62525]: DEBUG nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1837.051222] env[62525]: DEBUG nova.compute.manager [req-50fd65b1-7756-42dd-a3fd-ae29e808097c req-6c73ffc5-3d51-4bd5-bfd0-9b971079b8bb service nova] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Received event network-vif-deleted-fa019fa8-6752-4b33-877c-63d55cadbf80 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.083690] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c86f0c-3dee-4781-a854-8ed9cd3f3bee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.092188] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab16092-f208-4510-9bc7-8431f30ec51e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.123755] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af105644-b07a-4b21-a0db-e2a6b4e63b7e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.131643] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f56d75-7e76-4b73-83c4-41a64d31331b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.145496] env[62525]: DEBUG nova.compute.provider_tree [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1837.229765] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1837.230038] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bf0c7b1-492d-476c-ac72-b59c46204d95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.239768] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1837.239768] env[62525]: value = "task-1782152" [ 1837.239768] env[62525]: _type = "Task" [ 1837.239768] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.248028] env[62525]: INFO nova.compute.manager [-] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Took 1.43 seconds to deallocate network for instance. 
[ 1837.248253] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782152, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.602388] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "13020656-4e4f-40ee-a77a-fd64ae340e09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.602661] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.631417] env[62525]: DEBUG nova.network.neutron [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updated VIF entry in instance network info cache for port 5e23aff4-06c8-4549-b425-5b83423352ce. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1837.631860] env[62525]: DEBUG nova.network.neutron [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updating instance_info_cache with network_info: [{"id": "5e23aff4-06c8-4549-b425-5b83423352ce", "address": "fa:16:3e:6e:b4:41", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e23aff4-06", "ovs_interfaceid": "5e23aff4-06c8-4549-b425-5b83423352ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.665439] env[62525]: ERROR nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [req-d0ff0ada-b89e-4a6c-889f-ac46580ed244] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d0ff0ada-b89e-4a6c-889f-ac46580ed244"}]} [ 1837.681995] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1837.698133] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1837.698406] env[62525]: DEBUG nova.compute.provider_tree [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1837.709618] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1837.725638] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1837.752921] env[62525]: DEBUG oslo_vmware.api 
[None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782152, 'name': PowerOffVM_Task, 'duration_secs': 0.363729} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.752921] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1837.753288] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb95157-7d63-42ab-ad90-5e2fa1a164f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.757682] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.451600] env[62525]: DEBUG nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1838.453763] env[62525]: DEBUG nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1838.456202] env[62525]: DEBUG oslo_concurrency.lockutils [req-1e1ba315-a823-4abd-8f1a-66dca69db908 req-b67b4655-ee4b-4e6b-90bf-6c2f216cea2f service nova] Releasing lock "refresh_cache-42d14e44-44d6-46de-84e3-049a2d7e84f3" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.461511] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69ea341-60a9-4183-be7b-cc2873750459 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.490737] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1838.491071] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1838.491183] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1838.491364] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1838.491511] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1838.491663] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1838.491864] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1838.492030] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1838.492215] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1838.492423] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1838.492609] env[62525]: DEBUG nova.virt.hardware [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1838.494546] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fc4d77-bf51-4a3a-9298-70b6a573f860 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.508764] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1838.509970] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eefd088-cf31-49a1-93ef-7f793aefa89e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.513964] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65e848d9-014e-4398-9ae9-271cc68cde80 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.527337] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Instance VIF info [] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1838.532889] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Creating folder: Project (e4f67c17a2fe4126befc938516a101f1). Parent ref: group-v369553. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1838.536041] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f4d01a7-fb1c-4107-8f18-9778db9fe7f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.537518] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1838.537518] env[62525]: value = "task-1782154" [ 1838.537518] env[62525]: _type = "Task" [ 1838.537518] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.546628] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1838.546819] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1838.547062] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.547217] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.547396] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1838.547622] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Created folder: Project (e4f67c17a2fe4126befc938516a101f1) in parent group-v369553. [ 1838.547783] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Creating folder: Instances. Parent ref: group-v369854. 
{{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1838.549902] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a52c611-8eca-46df-ad36-2153c212fc4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.551432] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b051c2d4-b3f1-4114-91c3-258ead93c529 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.560362] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Created folder: Instances in parent group-v369854. [ 1838.560637] env[62525]: DEBUG oslo.service.loopingcall [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1838.561546] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1838.561795] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1838.561958] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1838.564589] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87fdaf6f-2774-48e2-9f72-ef78a17871e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.575825] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab27725f-126c-430a-af01-0674ebc1e253 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.581391] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1838.581391] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522cf2d5-cfcc-b4dc-52c3-974ba17cbaa6" [ 1838.581391] env[62525]: _type = "Task" [ 1838.581391] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.585029] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1838.585029] env[62525]: value = "task-1782157" [ 1838.585029] env[62525]: _type = "Task" [ 1838.585029] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.593027] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522cf2d5-cfcc-b4dc-52c3-974ba17cbaa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.601357] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782157, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.667259] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6a88ce-5f0a-4870-bb8f-f50e62b5e730 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.676742] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bb58bc-359d-469b-9a2a-1975fc3ff573 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.709446] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289676b3-16de-491c-ba86-90832f5cd7ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.717630] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f5576d-b909-4832-a257-8b305da5e0dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.731156] env[62525]: DEBUG nova.compute.provider_tree [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1838.975973] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.092144] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522cf2d5-cfcc-b4dc-52c3-974ba17cbaa6, 'name': SearchDatastore_Task, 'duration_secs': 0.021244} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.093261] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccfa3368-c322-4191-8d2b-0aa560bd8f3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.098986] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782157, 'name': CreateVM_Task, 'duration_secs': 0.269867} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.098986] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1839.099195] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.099348] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.099878] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1839.100114] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ad6f48c-a53c-42ab-bca4-4819822919aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.104541] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1839.104541] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5215614a-570d-d730-dd49-df5026c7829c" [ 1839.104541] env[62525]: _type = "Task" [ 1839.104541] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.107150] env[62525]: DEBUG nova.compute.manager [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1839.113944] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1839.113944] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d4034d-77e8-bc8a-3ae2-0d2034dd68dd" [ 1839.113944] env[62525]: _type = "Task" [ 1839.113944] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.120594] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5215614a-570d-d730-dd49-df5026c7829c, 'name': SearchDatastore_Task, 'duration_secs': 0.009289} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.121132] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.121387] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. {{(pid=62525) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1839.121950] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0555298a-5eda-4f2f-a51e-2c27cf16a5c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.127884] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d4034d-77e8-bc8a-3ae2-0d2034dd68dd, 'name': SearchDatastore_Task, 'duration_secs': 0.008942} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.128496] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.128736] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1839.128966] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.129128] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.129307] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1839.129540] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7550d694-863b-4d66-8dba-02bbc4834435 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.132371] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1839.132371] env[62525]: value = "task-1782158" [ 1839.132371] env[62525]: _type = "Task" [ 1839.132371] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.136588] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1839.136765] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1839.141353] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d74e2ba4-c165-45b8-a8a2-ba5d4079d3f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.143411] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782158, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.146329] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1839.146329] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522ee112-d386-f483-d103-35fe04c58707" [ 1839.146329] env[62525]: _type = "Task" [ 1839.146329] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.154919] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522ee112-d386-f483-d103-35fe04c58707, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.251740] env[62525]: ERROR nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [req-083a6a48-3111-4b26-aa82-d30bb031a07a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-083a6a48-3111-4b26-aa82-d30bb031a07a"}]} [ 1839.267319] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1839.280599] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1839.280873] env[62525]: DEBUG nova.compute.provider_tree [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1839.294214] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1839.314756] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1839.541863] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1947b501-aa0b-45a1-8989-d181ec950263 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.549144] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-375efe09-d299-4884-a5ce-78908105c3aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.578777] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66787e5b-7b75-4c9f-a0b8-4dd0993356b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.585998] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5b03fd-ef37-40ef-ba14-e81b326654ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.598670] env[62525]: DEBUG nova.compute.provider_tree [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1839.626098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.641737] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782158, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.408853} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.641974] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk. 
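The ERROR record above ("Got 409 ... placement.concurrent_update ... resource provider generation conflict"), followed by "Refreshing inventories ...", "Updating ProviderTree inventory ..." and finally "Inventory has not changed ...", traces Placement's optimistic-concurrency protocol: every inventory update carries the resource provider generation, a stale generation is rejected with HTTP 409, and the client re-reads the provider and retries. The sketch below illustrates that read-modify-retry loop with plain HTTP calls against the Placement API; it is a minimal illustration, not Nova's scheduler report client, and the service URL, token and microversion header are assumptions.

    import requests

    PLACEMENT = "https://placement.example.test"      # assumed endpoint
    HEADERS = {
        "X-Auth-Token": "<token>",                    # assumed credential
        "OpenStack-API-Version": "placement 1.26",    # assumed microversion
    }

    def set_inventory(rp_uuid, inventories, max_retries=3):
        """PUT the full inventory for a resource provider, retrying on a
        409 generation conflict by re-reading the current generation."""
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            # Read the current inventory to learn the provider generation.
            current = requests.get(url, headers=HEADERS).json()
            body = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code == 409:
                # Another writer updated the provider first (the
                # "placement.concurrent_update" case in the log above);
                # loop around to refresh the generation and retry.
                continue
            resp.raise_for_status()
        raise RuntimeError("gave up after repeated generation conflicts")

In the log the retry resolves itself: after refreshing, the report client finds the inventory unchanged and no further PUT is needed.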
[ 1839.642803] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8eb471-27d9-446c-8133-73dfff2577e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.668163] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1839.671206] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc901b2d-2863-4a5d-9892-d1d73873ffd2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.689729] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522ee112-d386-f483-d103-35fe04c58707, 'name': SearchDatastore_Task, 'duration_secs': 0.008061} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.691437] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1839.691437] env[62525]: value = "task-1782159" [ 1839.691437] env[62525]: _type = "Task" [ 1839.691437] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.691618] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fd7b7cc-86dd-45a0-8058-c655767abf98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.699540] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1839.699540] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527ad91e-c8f2-074f-1082-3d81f97c4d1d" [ 1839.699540] env[62525]: _type = "Task" [ 1839.699540] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.702559] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782159, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.710272] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ad91e-c8f2-074f-1082-3d81f97c4d1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.787961] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Volume attach. Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1839.788243] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369853', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'name': 'volume-a734d476-cf33-4b2d-b480-37fa8aef837b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '160a67ea-5044-4597-9a61-82e05b8aa778', 'attached_at': '', 'detached_at': '', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'serial': 'a734d476-cf33-4b2d-b480-37fa8aef837b'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1839.789150] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ccd14d-1e08-44f0-bddd-430016144e2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.805051] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036a0fe8-0e64-4846-b7f2-e25e0ce70558 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.828879] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-a734d476-cf33-4b2d-b480-37fa8aef837b/volume-a734d476-cf33-4b2d-b480-37fa8aef837b.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1839.829176] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f478c72-565a-4772-b7b4-25400e8875c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.848283] env[62525]: DEBUG oslo_vmware.api [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1839.848283] env[62525]: value = "task-1782160" [ 1839.848283] env[62525]: _type = "Task" [ 1839.848283] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.856263] env[62525]: DEBUG oslo_vmware.api [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782160, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.102323] env[62525]: DEBUG nova.scheduler.client.report [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1840.203047] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782159, 'name': ReconfigVM_Task, 'duration_secs': 0.284297} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.206118] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36-rescue.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1840.206941] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dca05a-59c8-443a-bb91-fe152b7c6f49 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.214922] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527ad91e-c8f2-074f-1082-3d81f97c4d1d, 'name': SearchDatastore_Task, 'duration_secs': 0.010632} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.230529] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.230829] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7c177d17-d0fe-4df6-900d-e1a6118bc79e/7c177d17-d0fe-4df6-900d-e1a6118bc79e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1840.235776] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27625306-7d24-4d51-a186-23a5976ce85d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.237828] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a894d627-adc0-4d8b-870b-c8f21ba4b5a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.255718] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1840.255718] env[62525]: value = "task-1782162" [ 1840.255718] env[62525]: _type = "Task" [ 1840.255718] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.256568] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1840.256568] env[62525]: value = "task-1782161" [ 1840.256568] env[62525]: _type = "Task" [ 1840.256568] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.267793] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782162, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.271033] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782161, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.358583] env[62525]: DEBUG oslo_vmware.api [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782160, 'name': ReconfigVM_Task, 'duration_secs': 0.353271} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.358822] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-a734d476-cf33-4b2d-b480-37fa8aef837b/volume-a734d476-cf33-4b2d-b480-37fa8aef837b.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1840.363769] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-288cf80d-458c-4e70-812f-62f25a511366 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.379597] env[62525]: DEBUG oslo_vmware.api [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1840.379597] env[62525]: value = "task-1782163" [ 1840.379597] env[62525]: _type = "Task" [ 1840.379597] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.388207] env[62525]: DEBUG oslo_vmware.api [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782163, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.608032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.723s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.608802] env[62525]: DEBUG nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1840.613266] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.121s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.613697] env[62525]: DEBUG nova.objects.instance [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'resources' on Instance uuid 5a40ca03-f61c-4232-80dc-7a745a34bc67 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1840.770580] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782161, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.774022] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782162, 'name': ReconfigVM_Task, 'duration_secs': 0.165337} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.774333] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1840.774613] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f33ef213-d2ee-4cc5-8cc4-92b0d69e8769 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.782199] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1840.782199] env[62525]: value = "task-1782164" [ 1840.782199] env[62525]: _type = "Task" [ 1840.782199] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.790951] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.890329] env[62525]: DEBUG oslo_vmware.api [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782163, 'name': ReconfigVM_Task, 'duration_secs': 0.138397} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.890866] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369853', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'name': 'volume-a734d476-cf33-4b2d-b480-37fa8aef837b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '160a67ea-5044-4597-9a61-82e05b8aa778', 'attached_at': '', 'detached_at': '', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'serial': 'a734d476-cf33-4b2d-b480-37fa8aef837b'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1841.117914] env[62525]: DEBUG nova.compute.utils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1841.122237] env[62525]: DEBUG nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1841.122430] env[62525]: DEBUG nova.network.neutron [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1841.177653] env[62525]: DEBUG nova.policy [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e50433248fb4eb088e90d25fcb67c7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f3d5c15d37145aa84818a2ad88f307f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1841.271742] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652219} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.272048] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 7c177d17-d0fe-4df6-900d-e1a6118bc79e/7c177d17-d0fe-4df6-900d-e1a6118bc79e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1841.272373] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1841.272596] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a24689f8-5875-4898-87e8-22fea91bbde4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.279350] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1841.279350] env[62525]: value = "task-1782165" [ 1841.279350] env[62525]: _type = "Task" [ 1841.279350] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.296063] env[62525]: DEBUG oslo_vmware.api [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782164, 'name': PowerOnVM_Task, 'duration_secs': 0.412448} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.296288] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782165, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.296605] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1841.299526] env[62525]: DEBUG nova.compute.manager [None req-f7aa4a3c-360d-456e-ad44-dc0d1020f345 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1841.300346] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b342f477-3467-495e-be6f-b631dc654050 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.443464] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b885d208-e265-4ee9-8ffb-275b793820aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.454201] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f21512-c583-4fcd-b45d-3c55e062d59e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.486176] env[62525]: DEBUG nova.network.neutron [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Successfully created port: d060e85e-b0a9-45db-8fb4-2994f45e01f5 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1841.488596] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c145f96-6db6-482b-abd5-a8cdb9d058e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.496761] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49fb1d0-e461-438f-b16f-892d66f5dd8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.512049] env[62525]: DEBUG nova.compute.provider_tree [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.622908] env[62525]: DEBUG nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1841.791854] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077765} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.792258] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1841.793369] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab8021d-769e-4f48-af48-c3f9ad3bb33d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.822702] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 7c177d17-d0fe-4df6-900d-e1a6118bc79e/7c177d17-d0fe-4df6-900d-e1a6118bc79e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1841.825984] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cdf8ac1-1ea3-48a2-8195-cde94b96106b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.856064] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1841.856064] env[62525]: value = "task-1782166" [ 1841.856064] env[62525]: _type = "Task" [ 1841.856064] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.878350] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782166, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.926946] env[62525]: DEBUG nova.objects.instance [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'flavor' on Instance uuid 160a67ea-5044-4597-9a61-82e05b8aa778 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1842.015101] env[62525]: DEBUG nova.scheduler.client.report [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1842.366903] env[62525]: INFO nova.compute.manager [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Unrescuing [ 1842.367174] env[62525]: DEBUG oslo_concurrency.lockutils [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.367328] env[62525]: DEBUG oslo_concurrency.lockutils [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquired lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.367487] env[62525]: DEBUG nova.network.neutron [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1842.368688] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782166, 'name': ReconfigVM_Task, 'duration_secs': 0.258951} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.369152] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 7c177d17-d0fe-4df6-900d-e1a6118bc79e/7c177d17-d0fe-4df6-900d-e1a6118bc79e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1842.369750] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c5999bd-ed44-4ea7-8ed4-8347008af7fb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.376850] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1842.376850] env[62525]: value = "task-1782167" [ 1842.376850] env[62525]: _type = "Task" [ 1842.376850] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.385147] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782167, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.432923] env[62525]: DEBUG oslo_concurrency.lockutils [None req-27f69465-8665-4427-b841-b49f90e8443c tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.261s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.521711] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.524496] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.767s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.524740] env[62525]: DEBUG nova.objects.instance [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'resources' on Instance uuid b7768ee1-16f7-40f0-9f5f-28df4a1580f2 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1842.545219] env[62525]: INFO nova.scheduler.client.report [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 
tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted allocations for instance 5a40ca03-f61c-4232-80dc-7a745a34bc67 [ 1842.632720] env[62525]: DEBUG nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1842.657034] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1842.657321] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1842.657481] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1842.657662] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1842.657807] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1842.657953] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1842.658171] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1842.658332] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1842.658494] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1842.658660] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1842.658826] env[62525]: DEBUG nova.virt.hardware [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1842.659952] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2111be14-f5ce-4596-befe-55ebce9a6f8a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.667533] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "160a67ea-5044-4597-9a61-82e05b8aa778" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.667765] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.670652] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd292066-d70e-4400-8ae5-f709147ae67c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.883271] env[62525]: DEBUG nova.compute.manager [req-907f4868-bbf4-4bf8-9c35-d68423607c2a req-0afaca6d-8fb9-404a-8f67-447bf4020dab service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Received event network-vif-plugged-d060e85e-b0a9-45db-8fb4-2994f45e01f5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1842.883883] env[62525]: DEBUG oslo_concurrency.lockutils [req-907f4868-bbf4-4bf8-9c35-d68423607c2a req-0afaca6d-8fb9-404a-8f67-447bf4020dab service nova] Acquiring lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" 
{{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.884153] env[62525]: DEBUG oslo_concurrency.lockutils [req-907f4868-bbf4-4bf8-9c35-d68423607c2a req-0afaca6d-8fb9-404a-8f67-447bf4020dab service nova] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.884334] env[62525]: DEBUG oslo_concurrency.lockutils [req-907f4868-bbf4-4bf8-9c35-d68423607c2a req-0afaca6d-8fb9-404a-8f67-447bf4020dab service nova] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.884507] env[62525]: DEBUG nova.compute.manager [req-907f4868-bbf4-4bf8-9c35-d68423607c2a req-0afaca6d-8fb9-404a-8f67-447bf4020dab service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] No waiting events found dispatching network-vif-plugged-d060e85e-b0a9-45db-8fb4-2994f45e01f5 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1842.884677] env[62525]: WARNING nova.compute.manager [req-907f4868-bbf4-4bf8-9c35-d68423607c2a req-0afaca6d-8fb9-404a-8f67-447bf4020dab service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Received unexpected event network-vif-plugged-d060e85e-b0a9-45db-8fb4-2994f45e01f5 for instance with vm_state building and task_state spawning. [ 1842.895139] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782167, 'name': Rename_Task, 'duration_secs': 0.251138} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.895416] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1842.895735] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b85a0525-7f0e-40fb-858a-14dd8e981aba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.901907] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1842.901907] env[62525]: value = "task-1782168" [ 1842.901907] env[62525]: _type = "Task" [ 1842.901907] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.909406] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782168, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.056321] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0d81f1ad-ccb2-40a8-89bc-9ce1cc7c888a tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "5a40ca03-f61c-4232-80dc-7a745a34bc67" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.999s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.074215] env[62525]: DEBUG nova.network.neutron [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Successfully updated port: d060e85e-b0a9-45db-8fb4-2994f45e01f5 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1843.135073] env[62525]: DEBUG nova.network.neutron [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Updating instance_info_cache with network_info: [{"id": "2e4896a2-e40f-4f74-8b88-e93af562023a", "address": "fa:16:3e:1f:ca:bc", "network": {"id": "c00d17b1-deb6-417d-bcd0-50dd5ac9d968", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1599517315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d3fe67f961db46b9b3e2c37789829a2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e4896a2-e4", "ovs_interfaceid": "2e4896a2-e40f-4f74-8b88-e93af562023a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.175774] env[62525]: INFO nova.compute.manager [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Detaching volume a734d476-cf33-4b2d-b480-37fa8aef837b [ 1843.215262] env[62525]: INFO nova.virt.block_device [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Attempting to driver detach volume a734d476-cf33-4b2d-b480-37fa8aef837b from mountpoint /dev/sdb [ 1843.215556] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1843.215754] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369853', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'name': 'volume-a734d476-cf33-4b2d-b480-37fa8aef837b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '160a67ea-5044-4597-9a61-82e05b8aa778', 'attached_at': '', 'detached_at': '', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'serial': 'a734d476-cf33-4b2d-b480-37fa8aef837b'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1843.218480] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f449c0a9-7e42-415c-a4c9-713c48f6418e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.242925] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61d1858-42ee-4bdc-b3d2-84fe1b35ec59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.250731] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896be459-2ff4-4d73-81f9-16b25f8d1031 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.255571] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15defafd-cf9c-4670-815f-0839a7ef2017 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.276223] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e436e48f-c0aa-48d8-9b8c-bebfff2f2e37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.282360] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aebfa54-b677-4cf7-954e-6dc6b2fcfd58 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.297750] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] The volume has not been displaced from its original location: [datastore1] volume-a734d476-cf33-4b2d-b480-37fa8aef837b/volume-a734d476-cf33-4b2d-b480-37fa8aef837b.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1843.303304] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1843.304106] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9d42c7d-6778-453e-96da-8b1fd7611be6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.343402] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6c6841-7b28-4a69-959a-3017cbe8cdcf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.347448] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1843.347448] env[62525]: value = "task-1782169" [ 1843.347448] env[62525]: _type = "Task" [ 1843.347448] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.354772] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348ed401-d4c8-4f37-9a03-f66d3e7e6069 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.362451] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782169, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.373309] env[62525]: DEBUG nova.compute.provider_tree [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1843.412200] env[62525]: DEBUG oslo_vmware.api [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782168, 'name': PowerOnVM_Task, 'duration_secs': 0.411471} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.412461] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1843.412690] env[62525]: INFO nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Took 4.96 seconds to spawn the instance on the hypervisor. [ 1843.412901] env[62525]: DEBUG nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1843.413669] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a6c2c7-f6ac-43df-9122-35d22d20fc8d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.578279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.578441] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.578751] env[62525]: DEBUG nova.network.neutron [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1843.639955] env[62525]: DEBUG oslo_concurrency.lockutils [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Releasing lock "refresh_cache-154ac489-69e4-41a8-90cf-b3d6196c4822" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.640707] env[62525]: DEBUG nova.objects.instance [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lazy-loading 'flavor' on Instance uuid 154ac489-69e4-41a8-90cf-b3d6196c4822 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1843.860065] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782169, 'name': ReconfigVM_Task, 'duration_secs': 0.219594} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.860392] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1843.865264] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12800455-6b64-402b-a172-442f4337efb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.879327] env[62525]: DEBUG nova.scheduler.client.report [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1843.883940] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1843.883940] env[62525]: value = "task-1782170" [ 1843.883940] env[62525]: _type = "Task" [ 1843.883940] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.893950] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782170, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.932822] env[62525]: INFO nova.compute.manager [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Took 10.37 seconds to build instance. [ 1844.122823] env[62525]: DEBUG nova.network.neutron [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1844.146324] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e852b5ff-b218-4f4f-8db4-d11d298a65a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.168808] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.171539] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae857486-40d2-4a3c-9ae5-8eb4000b7552 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.179321] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1844.179321] env[62525]: value = "task-1782171" [ 1844.179321] env[62525]: _type = "Task" [ 1844.179321] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.187435] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.276707] env[62525]: DEBUG nova.network.neutron [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance_info_cache with network_info: [{"id": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "address": "fa:16:3e:cb:82:81", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd060e85e-b0", "ovs_interfaceid": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.386116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 
tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.389220] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.413s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.391055] env[62525]: INFO nova.compute.claims [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1844.403028] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782170, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.408756] env[62525]: INFO nova.scheduler.client.report [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted allocations for instance b7768ee1-16f7-40f0-9f5f-28df4a1580f2 [ 1844.437019] env[62525]: DEBUG oslo_concurrency.lockutils [None req-4360ec4b-6733-4b98-bab5-48e3c99ab554 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 11.886s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.690696] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782171, 'name': PowerOffVM_Task, 'duration_secs': 0.217987} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.690999] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1844.696241] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1844.696524] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d85fc25-1b9c-4896-90e5-fae1d291dec6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.714205] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1844.714205] env[62525]: value = "task-1782172" [ 1844.714205] env[62525]: _type = "Task" [ 1844.714205] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.722180] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782172, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.779985] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.780305] env[62525]: DEBUG nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Instance network_info: |[{"id": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "address": "fa:16:3e:cb:82:81", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd060e85e-b0", "ovs_interfaceid": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1844.780867] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:82:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd060e85e-b0a9-45db-8fb4-2994f45e01f5', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1844.789454] env[62525]: DEBUG oslo.service.loopingcall [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1844.789454] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1844.789726] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f28c158f-71e8-441b-b22d-e95b24ec164a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.808832] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1844.808832] env[62525]: value = "task-1782173" [ 1844.808832] env[62525]: _type = "Task" [ 1844.808832] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.816236] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782173, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.845086] env[62525]: DEBUG nova.compute.manager [None req-fbf2b815-9d65-42c6-ab32-7910dafa435b tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1844.845975] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04fe610-8513-41ea-9bfd-02f112005f46 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.900718] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782170, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.918261] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.918495] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.918732] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.918918] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.919098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.920939] env[62525]: DEBUG oslo_concurrency.lockutils [None req-dcf9216d-31c9-4836-9918-9793cef741d1 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "b7768ee1-16f7-40f0-9f5f-28df4a1580f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.339s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.922674] env[62525]: INFO nova.compute.manager [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Terminating instance [ 1844.924037] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "refresh_cache-7c177d17-d0fe-4df6-900d-e1a6118bc79e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.924216] env[62525]: DEBUG oslo_concurrency.lockutils 
[None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquired lock "refresh_cache-7c177d17-d0fe-4df6-900d-e1a6118bc79e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.924388] env[62525]: DEBUG nova.network.neutron [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1844.947615] env[62525]: DEBUG nova.compute.manager [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Received event network-changed-d060e85e-b0a9-45db-8fb4-2994f45e01f5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1844.947775] env[62525]: DEBUG nova.compute.manager [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Refreshing instance network info cache due to event network-changed-d060e85e-b0a9-45db-8fb4-2994f45e01f5. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1844.947960] env[62525]: DEBUG oslo_concurrency.lockutils [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] Acquiring lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.948086] env[62525]: DEBUG oslo_concurrency.lockutils [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] Acquired lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.948253] env[62525]: DEBUG nova.network.neutron [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Refreshing network info cache for port d060e85e-b0a9-45db-8fb4-2994f45e01f5 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1845.224103] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782172, 'name': ReconfigVM_Task, 'duration_secs': 0.269147} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.224438] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1845.224626] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1845.224872] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-591d4ff5-bec2-48ef-bb35-49b2e332886b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.232252] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1845.232252] env[62525]: value = "task-1782174" [ 1845.232252] env[62525]: _type = "Task" [ 1845.232252] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.239791] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782174, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.319255] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782173, 'name': CreateVM_Task, 'duration_secs': 0.331719} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.319663] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1845.320380] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.320556] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.320970] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1845.321302] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18de6de7-d665-4aa2-9b9b-157d1b6d20d0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.325567] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1845.325567] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f21989-7d04-79c0-1373-c4a4f1e0b669" [ 1845.325567] env[62525]: _type = "Task" [ 1845.325567] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.332783] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f21989-7d04-79c0-1373-c4a4f1e0b669, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.355606] env[62525]: INFO nova.compute.manager [None req-fbf2b815-9d65-42c6-ab32-7910dafa435b tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] instance snapshotting [ 1845.356176] env[62525]: DEBUG nova.objects.instance [None req-fbf2b815-9d65-42c6-ab32-7910dafa435b tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lazy-loading 'flavor' on Instance uuid 7c177d17-d0fe-4df6-900d-e1a6118bc79e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1845.400019] env[62525]: DEBUG oslo_vmware.api [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782170, 'name': ReconfigVM_Task, 'duration_secs': 1.185123} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.400376] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369853', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'name': 'volume-a734d476-cf33-4b2d-b480-37fa8aef837b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '160a67ea-5044-4597-9a61-82e05b8aa778', 'attached_at': '', 'detached_at': '', 'volume_id': 'a734d476-cf33-4b2d-b480-37fa8aef837b', 'serial': 'a734d476-cf33-4b2d-b480-37fa8aef837b'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1845.443200] env[62525]: DEBUG nova.network.neutron [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1845.463032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.463032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.463032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.463032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.463032] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.465495] env[62525]: INFO nova.compute.manager [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Terminating instance [ 1845.467246] env[62525]: DEBUG nova.compute.manager [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1845.467435] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1845.468259] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ca2a07-6e5d-4f82-a8e0-44f25ba73187 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.478753] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1845.479020] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7d77143-1ed7-4e1f-9f58-06008b31e952 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.485829] env[62525]: DEBUG oslo_vmware.api [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1845.485829] env[62525]: value = "task-1782175" [ 1845.485829] env[62525]: _type = "Task" [ 1845.485829] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.493810] env[62525]: DEBUG oslo_vmware.api [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782175, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.532281] env[62525]: DEBUG nova.network.neutron [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.595080] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4305b2c0-7e31-499f-8095-7fc7f5b6a782 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.604065] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857d225b-c302-46c8-bb63-e9f18e20f8ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.640173] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ff7f5f-265e-4527-8275-b2ee689ca1a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.648436] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d5f756-2492-4ce7-92de-cd3443f5d11b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.662224] env[62525]: DEBUG nova.compute.provider_tree [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1845.715210] env[62525]: DEBUG nova.network.neutron [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updated VIF entry in instance network info cache for port d060e85e-b0a9-45db-8fb4-2994f45e01f5. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1845.715653] env[62525]: DEBUG nova.network.neutron [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance_info_cache with network_info: [{"id": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "address": "fa:16:3e:cb:82:81", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd060e85e-b0", "ovs_interfaceid": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.741966] env[62525]: DEBUG oslo_vmware.api [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782174, 'name': PowerOnVM_Task, 'duration_secs': 0.395583} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.742267] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1845.742495] env[62525]: DEBUG nova.compute.manager [None req-519376f3-f351-4319-b559-38e5545c179f tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1845.743266] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c4127c-d91c-4e63-987c-c3db700296a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.835843] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f21989-7d04-79c0-1373-c4a4f1e0b669, 'name': SearchDatastore_Task, 'duration_secs': 0.010234} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.836212] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.836450] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1845.836683] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.836829] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.837009] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1845.837282] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2109ca4-e8e6-494e-a6dd-9843f4b1be2f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.845499] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1845.845678] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1845.846400] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-256ca5f5-0b1b-40a4-8898-54ed79503808 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.851998] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1845.851998] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52656cb5-e4a5-bec7-7422-a814aeb06041" [ 1845.851998] env[62525]: _type = "Task" [ 1845.851998] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.861375] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52656cb5-e4a5-bec7-7422-a814aeb06041, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.862372] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150e71f9-4d13-4348-8fbd-2d5b3727c295 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.877436] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdb4536-700d-49d5-ba85-2808a993e1d4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.945819] env[62525]: DEBUG nova.objects.instance [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'flavor' on Instance uuid 160a67ea-5044-4597-9a61-82e05b8aa778 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1845.996254] env[62525]: DEBUG oslo_vmware.api [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782175, 'name': PowerOffVM_Task, 'duration_secs': 0.238753} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.996542] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1845.996707] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1845.996957] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b5173b1-9e60-4ced-bcec-c9cd65ccb9b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.037825] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Releasing lock "refresh_cache-7c177d17-d0fe-4df6-900d-e1a6118bc79e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.038309] env[62525]: DEBUG nova.compute.manager [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1846.038504] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1846.039433] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2444aa5-86c5-4144-a0ca-bb6801f97f4a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.047185] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1846.047440] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a31c5f1-c83d-4dd3-b834-00d7e6c44b5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.053691] env[62525]: DEBUG oslo_vmware.api [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1846.053691] env[62525]: value = "task-1782177" [ 1846.053691] env[62525]: _type = "Task" [ 1846.053691] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.061677] env[62525]: DEBUG oslo_vmware.api [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782177, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.090265] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1846.090511] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1846.090730] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleting the datastore file [datastore1] 61fa8887-db88-4adc-8c3f-ffc78e0e550d {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1846.091051] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-457a5677-9d09-4fce-9dca-3296b6ed635f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.097738] env[62525]: DEBUG oslo_vmware.api [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1846.097738] env[62525]: value = "task-1782178" [ 1846.097738] env[62525]: _type = "Task" [ 1846.097738] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.105369] env[62525]: DEBUG oslo_vmware.api [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782178, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.165661] env[62525]: DEBUG nova.scheduler.client.report [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1846.218812] env[62525]: DEBUG oslo_concurrency.lockutils [req-b50e021a-b957-48f7-99cb-b307cbdeaa70 req-9f880f2b-7565-4ee9-acc8-ad25b2f44fbe service nova] Releasing lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.362691] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52656cb5-e4a5-bec7-7422-a814aeb06041, 'name': SearchDatastore_Task, 'duration_secs': 0.008618} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.363493] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94663666-54ba-44dc-af16-bd1c1ecbc136 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.368564] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1846.368564] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5251b6b9-d12a-c8b0-5a57-89b45e8e32e6" [ 1846.368564] env[62525]: _type = "Task" [ 1846.368564] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.377451] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5251b6b9-d12a-c8b0-5a57-89b45e8e32e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.388267] env[62525]: DEBUG nova.compute.manager [None req-fbf2b815-9d65-42c6-ab32-7910dafa435b tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Instance disappeared during snapshot {{(pid=62525) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1846.524685] env[62525]: DEBUG nova.compute.manager [None req-fbf2b815-9d65-42c6-ab32-7910dafa435b tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Found 0 images (rotation: 2) {{(pid=62525) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1846.563787] env[62525]: DEBUG oslo_vmware.api [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782177, 'name': PowerOffVM_Task, 'duration_secs': 0.183557} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.564690] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1846.564690] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1846.564690] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc4c626b-6160-4221-831d-0111629a7726 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.590932] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1846.591237] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1846.591432] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Deleting the datastore file [datastore1] 7c177d17-d0fe-4df6-900d-e1a6118bc79e {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1846.591723] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7ae9a4c-b718-42be-a088-a432a2e443b6 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.599055] env[62525]: DEBUG oslo_vmware.api [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for the task: (returnval){ [ 1846.599055] env[62525]: value = "task-1782180" [ 1846.599055] env[62525]: _type = "Task" [ 1846.599055] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.615483] env[62525]: DEBUG oslo_vmware.api [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136712} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.615721] env[62525]: DEBUG oslo_vmware.api [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.615956] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1846.616151] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1846.616327] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1846.616493] env[62525]: INFO nova.compute.manager [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1846.616724] env[62525]: DEBUG oslo.service.loopingcall [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.616909] env[62525]: DEBUG nova.compute.manager [-] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1846.617008] env[62525]: DEBUG nova.network.neutron [-] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.670888] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.671687] env[62525]: DEBUG nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1846.675681] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.050s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.880445] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5251b6b9-d12a-c8b0-5a57-89b45e8e32e6, 'name': SearchDatastore_Task, 'duration_secs': 0.023367} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.880848] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.881074] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c4e31de8-0b94-4fea-aa30-8af5608d257a/c4e31de8-0b94-4fea-aa30-8af5608d257a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1846.881302] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5207e25-cd5a-4cd6-af4e-413d1175eb9d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.888741] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1846.888741] env[62525]: value = "task-1782181" [ 1846.888741] env[62525]: _type = "Task" [ 1846.888741] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.896515] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782181, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.953107] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0e40f453-2f11-4d33-90c2-f37c08da0d7f tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.285s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.973665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "154ac489-69e4-41a8-90cf-b3d6196c4822" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.974343] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.974563] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "154ac489-69e4-41a8-90cf-b3d6196c4822-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.974751] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.974970] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.978017] env[62525]: INFO nova.compute.manager [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Terminating instance [ 1846.981274] env[62525]: DEBUG nova.compute.manager [req-3f36614a-d717-4702-abc1-f6d5ed43c5fb req-2c3cccbc-b845-4332-86d0-a8d44d8f1f58 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Received event network-vif-deleted-84be5220-9eed-41b9-8882-cb2acc60aa3d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1846.981523] env[62525]: INFO nova.compute.manager 
[req-3f36614a-d717-4702-abc1-f6d5ed43c5fb req-2c3cccbc-b845-4332-86d0-a8d44d8f1f58 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Neutron deleted interface 84be5220-9eed-41b9-8882-cb2acc60aa3d; detaching it from the instance and deleting it from the info cache [ 1846.981821] env[62525]: DEBUG nova.network.neutron [req-3f36614a-d717-4702-abc1-f6d5ed43c5fb req-2c3cccbc-b845-4332-86d0-a8d44d8f1f58 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.983447] env[62525]: DEBUG nova.compute.manager [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1846.983727] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1846.984935] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0fe40f-7652-4ca7-b130-e0b0529c1a73 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.994664] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1846.994952] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d888e690-5d7c-46aa-86c8-1bfd4d1ae007 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.000727] env[62525]: DEBUG oslo_vmware.api [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1847.000727] env[62525]: value = "task-1782182" [ 1847.000727] env[62525]: _type = "Task" [ 1847.000727] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.008419] env[62525]: DEBUG oslo_vmware.api [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782182, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.116660] env[62525]: DEBUG oslo_vmware.api [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Task: {'id': task-1782180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.357647} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.116985] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1847.117272] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1847.117543] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1847.118120] env[62525]: INFO nova.compute.manager [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1847.118254] env[62525]: DEBUG oslo.service.loopingcall [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1847.119023] env[62525]: DEBUG nova.compute.manager [-] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1847.119023] env[62525]: DEBUG nova.network.neutron [-] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1847.137889] env[62525]: DEBUG nova.network.neutron [-] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1847.179073] env[62525]: DEBUG nova.compute.utils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1847.183715] env[62525]: INFO nova.compute.claims [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1847.188129] env[62525]: DEBUG nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1847.188359] env[62525]: DEBUG nova.network.neutron [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1847.232939] env[62525]: DEBUG nova.policy [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '887b4b5be3e644a182ced389f3213be3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ea75b422b034b2b8bc55de69766ba75', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1847.363176] env[62525]: DEBUG nova.network.neutron [-] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.398686] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782181, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.489887] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b2f967f-7243-415a-866f-7630777ec79c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.501465] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09757e56-fa47-4483-b3d8-f234a77745a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.522795] env[62525]: DEBUG oslo_vmware.api [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782182, 'name': PowerOffVM_Task, 'duration_secs': 0.212287} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.524864] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1847.524864] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1847.524864] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15255a8b-a445-4f52-8982-87aa9ac87b81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.538663] env[62525]: DEBUG nova.compute.manager [req-3f36614a-d717-4702-abc1-f6d5ed43c5fb req-2c3cccbc-b845-4332-86d0-a8d44d8f1f58 service nova] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Detach interface failed, port_id=84be5220-9eed-41b9-8882-cb2acc60aa3d, reason: Instance 61fa8887-db88-4adc-8c3f-ffc78e0e550d could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1847.586212] env[62525]: DEBUG nova.network.neutron [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Successfully created port: c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1847.607554] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1847.607554] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1847.607739] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Deleting the datastore file [datastore1] 154ac489-69e4-41a8-90cf-b3d6196c4822 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1847.607927] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2dfd3841-106c-4b9e-ba8e-8382a3e9c2db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.614817] env[62525]: DEBUG oslo_vmware.api [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1847.614817] env[62525]: value = 
"task-1782184" [ 1847.614817] env[62525]: _type = "Task" [ 1847.614817] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.624827] env[62525]: DEBUG oslo_vmware.api [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.640911] env[62525]: DEBUG nova.network.neutron [-] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.688757] env[62525]: DEBUG nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1847.692732] env[62525]: INFO nova.compute.resource_tracker [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating resource usage from migration 0efe3fee-d92f-4629-8cc2-120967622612 [ 1847.866408] env[62525]: INFO nova.compute.manager [-] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Took 1.25 seconds to deallocate network for instance. [ 1847.902264] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.784622} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.902264] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c4e31de8-0b94-4fea-aa30-8af5608d257a/c4e31de8-0b94-4fea-aa30-8af5608d257a.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1847.902264] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1847.902838] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-541bac2d-7ff9-438c-b924-c2f4d901da38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.910725] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1847.910725] env[62525]: value = "task-1782185" [ 1847.910725] env[62525]: _type = "Task" [ 1847.910725] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.919692] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782185, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.921391] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35f5343-845b-4629-b9d1-d791de34b5ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.928662] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886900fd-3cc0-48ab-aa8a-4a6425c42a03 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.962957] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ffbbff-241c-4497-bf77-9cd16e9d8bc3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.972052] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6075aac-c515-496f-b2c4-aacf8bf68c40 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.986854] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "160a67ea-5044-4597-9a61-82e05b8aa778" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.987154] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.987378] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "160a67ea-5044-4597-9a61-82e05b8aa778-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.987586] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.987758] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.989624] env[62525]: 
DEBUG nova.compute.provider_tree [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1847.991147] env[62525]: INFO nova.compute.manager [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Terminating instance [ 1847.992932] env[62525]: DEBUG nova.compute.manager [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1847.993166] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1847.993934] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bd7450-c723-42ea-8644-d5b4e3509186 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.002077] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1848.002332] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d06b7ac0-3e99-4444-9561-60fb0ee882a4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.008102] env[62525]: DEBUG oslo_vmware.api [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1848.008102] env[62525]: value = "task-1782186" [ 1848.008102] env[62525]: _type = "Task" [ 1848.008102] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.018871] env[62525]: DEBUG oslo_vmware.api [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782186, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.126748] env[62525]: DEBUG oslo_vmware.api [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.144069] env[62525]: INFO nova.compute.manager [-] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Took 1.03 seconds to deallocate network for instance. [ 1848.373058] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.421905] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142873} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.422081] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1848.422829] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd35ac8-a6f0-4fa7-a353-a037015ffa0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.444484] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] c4e31de8-0b94-4fea-aa30-8af5608d257a/c4e31de8-0b94-4fea-aa30-8af5608d257a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1848.444758] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7f7c5df-c75a-4c0d-be73-6389c4d24f9e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.464625] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1848.464625] env[62525]: value = "task-1782187" [ 1848.464625] env[62525]: _type = "Task" [ 1848.464625] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.473586] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.518494] env[62525]: DEBUG oslo_vmware.api [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782186, 'name': PowerOffVM_Task, 'duration_secs': 0.185263} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.518811] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1848.519017] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1848.519321] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-404b30e0-7185-4b54-8879-7351f7c39d73 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.524189] env[62525]: DEBUG nova.scheduler.client.report [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1848.524484] env[62525]: DEBUG nova.compute.provider_tree [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 152 to 153 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1848.524686] env[62525]: DEBUG nova.compute.provider_tree [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1848.615459] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1848.615776] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1848.615917] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleting the datastore file [datastore1] 160a67ea-5044-4597-9a61-82e05b8aa778 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1848.616265] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad80b161-5460-47f3-988b-0702f92f99e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.626338] env[62525]: DEBUG oslo_vmware.api [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.567512} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.627851] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1848.628051] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1848.628265] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1848.628462] env[62525]: INFO nova.compute.manager [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Took 1.64 seconds to destroy the instance on the hypervisor. 
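The DeleteDatastoreFile_Task records above follow the usual oslo.vmware pattern visible throughout this log: the driver starts a vCenter task through the API session, then wait_for_task polls it (the "progress is 0%" lines) until it logs "completed successfully". The sketch below is purely illustrative of that pattern using the public oslo.vmware API; the vCenter host, credentials and datastore path are invented placeholders, not values taken from this environment, and the sketch is not Nova's actual ds_util code.

# Illustrative sketch only: the invoke_api / wait_for_task polling pattern
# seen in the log records above. Host, credentials and the datastore path
# are made-up placeholders, not values from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test',               # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder user
    'secret',                        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# Fetch the first page of Datacenter objects and grab the FileManager,
# then start the datastore file delete task.
datacenters = session.invoke_api(vim_util, 'get_objects', session.vim,
                                 'Datacenter', 100)
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          file_manager,
                          name='[datastore1] example-dir/example.vmdk',
                          datacenter=datacenters.objects[0].obj)

# wait_for_task polls the task (producing the "progress is N%" DEBUG lines)
# and raises if vCenter reports the task in an error state.
session.wait_for_task(task)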
[ 1848.628722] env[62525]: DEBUG oslo.service.loopingcall [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1848.628996] env[62525]: DEBUG oslo_vmware.api [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1848.628996] env[62525]: value = "task-1782189" [ 1848.628996] env[62525]: _type = "Task" [ 1848.628996] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.629224] env[62525]: DEBUG nova.compute.manager [-] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1848.629324] env[62525]: DEBUG nova.network.neutron [-] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1848.638884] env[62525]: DEBUG oslo_vmware.api [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782189, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.651020] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.704804] env[62525]: DEBUG nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1848.731036] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1848.731353] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1848.731535] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1848.731760] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1848.731931] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1848.732128] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1848.732373] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1848.732568] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1848.732770] env[62525]: DEBUG 
nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1848.732965] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1848.733210] env[62525]: DEBUG nova.virt.hardware [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1848.734146] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb3596b-eeb3-4b74-bb10-7774f0366022 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.742149] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719c21fc-1d9b-42a6-b7d6-08dab9af9ca5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.975710] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782187, 'name': ReconfigVM_Task, 'duration_secs': 0.286996} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.976336] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Reconfigured VM instance instance-00000070 to attach disk [datastore1] c4e31de8-0b94-4fea-aa30-8af5608d257a/c4e31de8-0b94-4fea-aa30-8af5608d257a.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1848.976695] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a868df1-b675-4f85-96bc-191eb0623ce9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.984133] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1848.984133] env[62525]: value = "task-1782190" [ 1848.984133] env[62525]: _type = "Task" [ 1848.984133] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.992270] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782190, 'name': Rename_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.019044] env[62525]: DEBUG nova.compute.manager [req-ae649223-9bc6-4ca2-b5e7-66dc0baae0e4 req-e9cbd1b2-1833-49bc-823b-9b52ec827c0c service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Received event network-vif-deleted-2e4896a2-e40f-4f74-8b88-e93af562023a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1849.019044] env[62525]: INFO nova.compute.manager [req-ae649223-9bc6-4ca2-b5e7-66dc0baae0e4 req-e9cbd1b2-1833-49bc-823b-9b52ec827c0c service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Neutron deleted interface 2e4896a2-e40f-4f74-8b88-e93af562023a; detaching it from the instance and deleting it from the info cache [ 1849.019044] env[62525]: DEBUG nova.network.neutron [req-ae649223-9bc6-4ca2-b5e7-66dc0baae0e4 req-e9cbd1b2-1833-49bc-823b-9b52ec827c0c service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.029665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.354s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.032030] env[62525]: INFO nova.compute.manager [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Migrating [ 1849.037481] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.664s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.037941] env[62525]: DEBUG nova.objects.instance [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'resources' on Instance uuid 61fa8887-db88-4adc-8c3f-ffc78e0e550d {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.139416] env[62525]: DEBUG nova.network.neutron [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Successfully updated port: c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1849.145011] env[62525]: DEBUG oslo_vmware.api [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.38602} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.145312] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1849.145502] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1849.145677] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1849.145846] env[62525]: INFO nova.compute.manager [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1849.146096] env[62525]: DEBUG oslo.service.loopingcall [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1849.146288] env[62525]: DEBUG nova.compute.manager [-] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1849.146381] env[62525]: DEBUG nova.network.neutron [-] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1849.384894] env[62525]: DEBUG nova.network.neutron [-] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.495799] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782190, 'name': Rename_Task, 'duration_secs': 0.155287} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.496112] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1849.496370] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c91ea8bd-d28b-4b4b-971b-6a54cbf9739c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.503933] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1849.503933] env[62525]: value = "task-1782191" [ 1849.503933] env[62525]: _type = "Task" [ 1849.503933] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.515265] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782191, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.526166] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6732f936-6043-431f-a553-0c7838b1271b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.536256] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8deafef5-c138-4910-80ea-d1827877f701 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.550864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.551090] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.551331] env[62525]: DEBUG nova.network.neutron [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1849.576413] env[62525]: DEBUG nova.compute.manager [req-ae649223-9bc6-4ca2-b5e7-66dc0baae0e4 req-e9cbd1b2-1833-49bc-823b-9b52ec827c0c service nova] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Detach interface failed, port_id=2e4896a2-e40f-4f74-8b88-e93af562023a, reason: Instance 154ac489-69e4-41a8-90cf-b3d6196c4822 
could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1849.645925] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.646106] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.646265] env[62525]: DEBUG nova.network.neutron [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1849.814726] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfef020-da71-400d-9b96-d38fa4157ae8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.823403] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59e0766-6d2b-4344-a9b1-353dc3e6b49e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.854709] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96049028-6133-49ad-98ec-03d368ad9572 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.863132] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b9f86c-8fa5-489e-91d8-1324866a5b28 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.877308] env[62525]: DEBUG nova.compute.provider_tree [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.887554] env[62525]: INFO nova.compute.manager [-] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Took 1.26 seconds to deallocate network for instance. [ 1850.013429] env[62525]: DEBUG oslo_vmware.api [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782191, 'name': PowerOnVM_Task, 'duration_secs': 0.46414} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.013765] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1850.013973] env[62525]: INFO nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Took 7.38 seconds to spawn the instance on the hypervisor. [ 1850.014169] env[62525]: DEBUG nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1850.014956] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f663f1-e023-448d-ac1b-52e80346bf11 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.095750] env[62525]: DEBUG nova.network.neutron [-] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.184085] env[62525]: DEBUG nova.network.neutron [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.381029] env[62525]: DEBUG nova.scheduler.client.report [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1850.385343] env[62525]: DEBUG nova.network.neutron [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.395910] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.401889] env[62525]: DEBUG nova.network.neutron [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updating instance_info_cache with network_info: [{"id": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "address": "fa:16:3e:d6:85:91", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4fbe07a-10", "ovs_interfaceid": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.538961] env[62525]: INFO nova.compute.manager [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Took 16.05 seconds to build instance. [ 1850.599534] env[62525]: INFO nova.compute.manager [-] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Took 1.45 seconds to deallocate network for instance. [ 1850.888041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.850s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.890596] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.892406] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.241s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.892737] env[62525]: DEBUG nova.objects.instance [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lazy-loading 'resources' on Instance uuid 7c177d17-d0fe-4df6-900d-e1a6118bc79e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1850.903939] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Releasing lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.904275] env[62525]: DEBUG nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 
13020656-4e4f-40ee-a77a-fd64ae340e09] Instance network_info: |[{"id": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "address": "fa:16:3e:d6:85:91", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4fbe07a-10", "ovs_interfaceid": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1850.904704] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:85:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1850.912613] env[62525]: DEBUG oslo.service.loopingcall [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1850.913319] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1850.913545] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50d81f27-8780-4510-8956-b5198f4aea3b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.931022] env[62525]: INFO nova.scheduler.client.report [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted allocations for instance 61fa8887-db88-4adc-8c3f-ffc78e0e550d [ 1850.936616] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1850.936616] env[62525]: value = "task-1782192" [ 1850.936616] env[62525]: _type = "Task" [ 1850.936616] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.944977] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782192, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.040742] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da111ed-a503-4b9f-ac9e-2b9402530eba tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.560s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.107654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.128635] env[62525]: DEBUG nova.compute.manager [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Received event network-vif-plugged-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1851.128755] env[62525]: DEBUG oslo_concurrency.lockutils [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] Acquiring lock "13020656-4e4f-40ee-a77a-fd64ae340e09-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.128981] env[62525]: DEBUG oslo_concurrency.lockutils [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.129161] env[62525]: DEBUG oslo_concurrency.lockutils [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.129327] env[62525]: DEBUG nova.compute.manager [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] No waiting events found dispatching network-vif-plugged-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1851.129636] env[62525]: WARNING nova.compute.manager [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Received unexpected event network-vif-plugged-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 for instance with vm_state building and task_state spawning. 
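The repeated "Acquiring lock ..." / "Lock ... acquired ... waited 0.000s" / "... released ... held 0.000s" triples in this section are emitted by oslo.concurrency's lockutils wrappers (the inner decorator at lockutils.py:402/407/421 and the lock() context manager at lockutils.py:310/313/331). The following is a minimal, hypothetical sketch of those two usage forms; the lock names and functions are invented for illustration and are not Nova code.

# Illustrative sketch: the lockutils pattern behind the "Acquiring lock" /
# "acquired ... waited" / "released ... held" DEBUG lines in this log.
# Lock names and functions here are invented examples, not Nova code.
from oslo_concurrency import lockutils

# Decorator form: serializes all callers on the named lock and logs
# acquire/release timing, like the "-events" and "compute_resources" locks.
@lockutils.synchronized('example-instance-events', external=False)
def clear_events_example(instance_uuid):
    # body runs while the lock is held
    return []

# Context-manager form, like the "refresh_cache-<uuid>" locks above.
def refresh_cache_example(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # refresh the network info cache while holding the lock
        pass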
[ 1851.129636] env[62525]: DEBUG nova.compute.manager [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Received event network-changed-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1851.129777] env[62525]: DEBUG nova.compute.manager [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Refreshing instance network info cache due to event network-changed-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1851.129965] env[62525]: DEBUG oslo_concurrency.lockutils [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] Acquiring lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.130107] env[62525]: DEBUG oslo_concurrency.lockutils [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] Acquired lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.130282] env[62525]: DEBUG nova.network.neutron [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Refreshing network info cache for port c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1851.439272] env[62525]: DEBUG oslo_concurrency.lockutils [None req-736c7af0-257f-4b0c-8db8-b8252dad2c92 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "61fa8887-db88-4adc-8c3f-ffc78e0e550d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.977s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.450578] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782192, 'name': CreateVM_Task, 'duration_secs': 0.343357} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.453218] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1851.454395] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.454395] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.455023] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1851.455307] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18e9cd99-9050-40f2-8a13-6a575c012925 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.460398] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1851.460398] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520848b3-ae98-5e29-a9e9-b14b1ab5b9da" [ 1851.460398] env[62525]: _type = "Task" [ 1851.460398] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.472163] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520848b3-ae98-5e29-a9e9-b14b1ab5b9da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.573966] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898e340e-1b29-49e8-857c-ea062a2d3c9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.581739] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e439430e-f41f-4c01-b86c-28f020945682 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.613537] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74b908d-3fbf-4d9c-9091-7f1af41255b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.625677] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2cc9fad-b0ff-4ec7-87a3-6b9caac08e0c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.642055] env[62525]: DEBUG nova.compute.provider_tree [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1851.832236] env[62525]: DEBUG nova.network.neutron [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updated VIF entry in instance network info cache for port c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1851.832626] env[62525]: DEBUG nova.network.neutron [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updating instance_info_cache with network_info: [{"id": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "address": "fa:16:3e:d6:85:91", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4fbe07a-10", "ovs_interfaceid": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.973080] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520848b3-ae98-5e29-a9e9-b14b1ab5b9da, 'name': SearchDatastore_Task, 'duration_secs': 0.02233} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.973289] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.973523] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1851.973754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.973898] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.974078] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1851.974404] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffa78633-62f9-4b1e-9b68-348821714637 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.983925] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1851.984023] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1851.984734] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d7915cf-cba5-4d1f-908c-e81e4d327aa6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.989885] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1851.989885] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529a1525-f1ed-6649-fbd3-b89b23cff76c" [ 1851.989885] env[62525]: _type = "Task" [ 1851.989885] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.997530] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529a1525-f1ed-6649-fbd3-b89b23cff76c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.127368] env[62525]: DEBUG nova.compute.manager [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1852.144961] env[62525]: DEBUG nova.scheduler.client.report [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1852.335453] env[62525]: DEBUG oslo_concurrency.lockutils [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] Releasing lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.335754] env[62525]: DEBUG nova.compute.manager [req-fbef8ec0-a6ec-4d4e-8f18-b49a1206dc94 req-6a5cdd66-3732-42f2-9f5d-09865c4d020b service nova] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Received event network-vif-deleted-ae436399-b552-425e-a202-0742904e7a48 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1852.408147] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18712e11-2e17-4453-a0e8-55c69d7d69dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.427037] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 
tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance 'e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1852.501021] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529a1525-f1ed-6649-fbd3-b89b23cff76c, 'name': SearchDatastore_Task, 'duration_secs': 0.009128} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.501274] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b315f0d-9ecf-4f1d-b55c-e8b5e47bee15 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.506777] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1852.506777] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a33cc1-3998-73b8-0916-2787d5efd351" [ 1852.506777] env[62525]: _type = "Task" [ 1852.506777] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.514871] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a33cc1-3998-73b8-0916-2787d5efd351, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.644609] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.649531] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.652246] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.257s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.652487] env[62525]: DEBUG nova.objects.instance [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lazy-loading 'resources' on Instance uuid 154ac489-69e4-41a8-90cf-b3d6196c4822 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1852.666104] env[62525]: INFO nova.scheduler.client.report [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Deleted allocations for instance 7c177d17-d0fe-4df6-900d-e1a6118bc79e [ 1852.699156] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "52341e2f-b556-4f84-b60e-16a3e71df504" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.699415] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "52341e2f-b556-4f84-b60e-16a3e71df504" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.932981] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1852.933287] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5300f94b-a11f-4d45-87d8-f8532dc14d95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.940280] 
env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1852.940280] env[62525]: value = "task-1782193" [ 1852.940280] env[62525]: _type = "Task" [ 1852.940280] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.948498] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.018320] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a33cc1-3998-73b8-0916-2787d5efd351, 'name': SearchDatastore_Task, 'duration_secs': 0.00975} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.018579] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.018946] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 13020656-4e4f-40ee-a77a-fd64ae340e09/13020656-4e4f-40ee-a77a-fd64ae340e09.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1853.019270] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-240a4297-5739-4806-a4bd-0f2d939b5ffb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.025994] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1853.025994] env[62525]: value = "task-1782194" [ 1853.025994] env[62525]: _type = "Task" [ 1853.025994] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.035906] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782194, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.175959] env[62525]: DEBUG oslo_concurrency.lockutils [None req-585f9cad-bf91-4762-9848-00289f3f5bb4 tempest-ServersAaction247Test-480004059 tempest-ServersAaction247Test-480004059-project-member] Lock "7c177d17-d0fe-4df6-900d-e1a6118bc79e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.257s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.203354] env[62525]: DEBUG nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1853.357460] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdb15cd-2b38-4122-b471-cd67d7c49c0c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.365891] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea53f8cc-a8fd-458c-aea1-7c94a578bf2d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.399947] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e095005f-7f38-4a22-b3bf-1ba1c5df2f90 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.407941] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187157bd-3b7e-41c2-a19f-539dcd433ccd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.422673] env[62525]: DEBUG nova.compute.provider_tree [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.449396] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782193, 'name': PowerOffVM_Task, 'duration_secs': 0.189266} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.449671] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1853.449865] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance 'e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1853.536278] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435949} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.536559] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 13020656-4e4f-40ee-a77a-fd64ae340e09/13020656-4e4f-40ee-a77a-fd64ae340e09.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1853.536775] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1853.537038] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19b43080-9d9f-4644-8cac-6a5b2b4cc6c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.542997] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1853.542997] env[62525]: value = "task-1782195" [ 1853.542997] env[62525]: _type = "Task" [ 1853.542997] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.552184] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782195, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.722686] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.926432] env[62525]: DEBUG nova.scheduler.client.report [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1853.957981] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1853.958329] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1853.958425] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1853.958614] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1853.958770] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1853.958931] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1853.959164] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1853.959345] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1853.959562] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1853.959774] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1853.959954] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1853.966062] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6086018-725a-4a17-a59e-fb2cb48930d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.983837] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1853.983837] env[62525]: value = "task-1782196" [ 1853.983837] env[62525]: _type = "Task" [ 1853.983837] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.991706] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.053546] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075785} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.053848] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1854.054646] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f080c83b-ec27-4645-9be6-db36b6b49449 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.076504] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 13020656-4e4f-40ee-a77a-fd64ae340e09/13020656-4e4f-40ee-a77a-fd64ae340e09.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1854.076768] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a444ac4a-ea19-44df-a6d1-af9803673afc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.099716] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1854.099716] env[62525]: value = "task-1782197" [ 1854.099716] env[62525]: _type = "Task" [ 1854.099716] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.108077] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782197, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.433072] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.434420] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.327s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.434694] env[62525]: DEBUG nova.objects.instance [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'resources' on Instance uuid 160a67ea-5044-4597-9a61-82e05b8aa778 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1854.454540] env[62525]: INFO nova.scheduler.client.report [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Deleted allocations for instance 154ac489-69e4-41a8-90cf-b3d6196c4822 [ 1854.494555] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782196, 'name': ReconfigVM_Task, 'duration_secs': 0.316724} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.494879] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance 'e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1854.610073] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782197, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.962393] env[62525]: DEBUG oslo_concurrency.lockutils [None req-71b67e3f-1bd7-4c06-9833-56afdc9b3bd3 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "154ac489-69e4-41a8-90cf-b3d6196c4822" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.988s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.001655] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1855.001894] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1855.002085] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1855.002283] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1855.002429] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1855.002575] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1855.002771] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1855.002929] env[62525]: DEBUG nova.virt.hardware [None 
req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1855.003107] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1855.003272] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1855.003517] env[62525]: DEBUG nova.virt.hardware [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1855.008810] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1855.011023] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef72c680-d78c-419d-ab5d-1adb3c61b8f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.029633] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1855.029633] env[62525]: value = "task-1782198" [ 1855.029633] env[62525]: _type = "Task" [ 1855.029633] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.041863] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782198, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.107661] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573a4917-b9cc-4d11-810b-967e11d93ac7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.113203] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782197, 'name': ReconfigVM_Task, 'duration_secs': 0.636656} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.113831] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 13020656-4e4f-40ee-a77a-fd64ae340e09/13020656-4e4f-40ee-a77a-fd64ae340e09.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1855.114470] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d554341-7471-4c55-9124-69557560e363 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.118967] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4966a8e3-f1e3-4175-a0ff-6c6047e0c408 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.123488] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "69a1093a-95d7-4cbb-90bf-1a213470872a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.123737] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.123955] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "69a1093a-95d7-4cbb-90bf-1a213470872a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.124151] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.124322] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.125878] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 
tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1855.125878] env[62525]: value = "task-1782199" [ 1855.125878] env[62525]: _type = "Task" [ 1855.125878] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.126328] env[62525]: INFO nova.compute.manager [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Terminating instance [ 1855.155511] env[62525]: DEBUG nova.compute.manager [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1855.155728] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1855.157024] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a56695-9a73-44dc-b386-ef5f9f4d75b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.160462] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae972ed2-0c45-48aa-92e5-729b9f147745 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.166401] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782199, 'name': Rename_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.169817] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1855.171688] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e2287aa-ad23-4223-927e-57d00e9fd000 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.174017] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f5ad41-7d56-49f3-932f-71acfb51893e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.187071] env[62525]: DEBUG nova.compute.provider_tree [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1855.189300] env[62525]: DEBUG oslo_vmware.api [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1855.189300] env[62525]: value = "task-1782200" [ 1855.189300] env[62525]: _type = "Task" [ 1855.189300] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.196711] env[62525]: DEBUG oslo_vmware.api [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.539320] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782198, 'name': ReconfigVM_Task, 'duration_secs': 0.173809} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.539621] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1855.540396] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6aa5334-62f8-42c4-afde-81ea93ff2433 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.564837] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.564837] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-376c1d54-771b-4dc1-9dbc-9404c5b1e7d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.583048] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1855.583048] env[62525]: value = "task-1782201" [ 1855.583048] env[62525]: _type = "Task" [ 1855.583048] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.591132] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782201, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.638030] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782199, 'name': Rename_Task, 'duration_secs': 0.163467} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.638855] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1855.638855] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f4a756c-6a81-4d98-a6f2-385a43a17efe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.644672] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1855.644672] env[62525]: value = "task-1782202" [ 1855.644672] env[62525]: _type = "Task" [ 1855.644672] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.652828] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782202, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.690753] env[62525]: DEBUG nova.scheduler.client.report [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1855.703012] env[62525]: DEBUG oslo_vmware.api [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782200, 'name': PowerOffVM_Task, 'duration_secs': 0.189051} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.703295] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1855.703462] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1855.703715] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8780585b-4637-48ad-a48d-b2ea89cbdcfe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.067603] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.067820] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.067999] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Deleting the datastore file [datastore1] 69a1093a-95d7-4cbb-90bf-1a213470872a {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.068291] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99be047f-f154-401d-a92a-2e17857f6aee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.074578] env[62525]: DEBUG oslo_vmware.api [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for the task: (returnval){ [ 1856.074578] env[62525]: value = "task-1782204" [ 1856.074578] env[62525]: _type = "Task" [ 1856.074578] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.082472] env[62525]: DEBUG oslo_vmware.api [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.091675] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782201, 'name': ReconfigVM_Task, 'duration_secs': 0.450482} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.091932] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfigured VM instance instance-00000066 to attach disk [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.092227] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance 'e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1856.155213] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782202, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.199022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.201588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.557s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.218884] env[62525]: INFO nova.scheduler.client.report [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted allocations for instance 160a67ea-5044-4597-9a61-82e05b8aa778 [ 1856.586311] env[62525]: DEBUG oslo_vmware.api [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.598526] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b092ac-01ba-464d-8d83-301c1d5f11d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.620628] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81241b0f-dbd6-4f69-be04-3bc68ebf86f5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.639126] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance 'e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1856.655632] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782202, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.708235] env[62525]: INFO nova.compute.claims [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1856.726491] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0da5768f-4696-4ade-9cbf-5ac76432581e tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "160a67ea-5044-4597-9a61-82e05b8aa778" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.739s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.086341] env[62525]: DEBUG oslo_vmware.api [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Task: {'id': task-1782204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.635834} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.086628] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1857.086817] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1857.086993] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1857.087191] env[62525]: INFO nova.compute.manager [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Took 1.93 seconds to destroy the instance on the hypervisor. [ 1857.087413] env[62525]: DEBUG oslo.service.loopingcall [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1857.087605] env[62525]: DEBUG nova.compute.manager [-] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1857.087701] env[62525]: DEBUG nova.network.neutron [-] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1857.161650] env[62525]: DEBUG oslo_vmware.api [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782202, 'name': PowerOnVM_Task, 'duration_secs': 1.217645} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.162065] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1857.162336] env[62525]: INFO nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Took 8.46 seconds to spawn the instance on the hypervisor. 
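Editor's note: nearly every operation in this trace has the same shape — a vCenter task is started (PowerOnVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task), wait_for_task (oslo_vmware/api.py:397) blocks on it, and _poll_task (api.py:434/:444) reports "progress is N%" until the task finishes with a duration_secs. The snippet below is a minimal, standard-library sketch of that polling loop, not the oslo.vmware implementation; FakeTask and all of its fields are invented purely for illustration.

    # Simplified stand-in (not oslo.vmware) for the wait_for_task/_poll_task
    # cycle visible throughout this trace: poll a task at a fixed interval,
    # print its progress, and report duration_secs once it completes.
    import time
    from dataclasses import dataclass, field

    @dataclass
    class FakeTask:
        """Illustrative task object; field names mirror the log, not the real API."""
        task_id: str
        name: str
        _started: float = field(default_factory=time.monotonic)
        _duration: float = 0.5  # pretend the backend needs half a second

        def progress(self) -> int:
            done = min(1.0, (time.monotonic() - self._started) / self._duration)
            return int(done * 100)

        def is_complete(self) -> bool:
            return self.progress() >= 100

    def wait_for_task(task: FakeTask, poll_interval: float = 0.1) -> float:
        """Poll until the task completes and return its wall-clock duration."""
        start = time.monotonic()
        while not task.is_complete():
            print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} "
                  f"progress is {task.progress()}%.")
            time.sleep(poll_interval)
        duration = time.monotonic() - start
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}, "
              f"'duration_secs': {duration:.6f}}} completed successfully.")
        return duration

    if __name__ == "__main__":
        wait_for_task(FakeTask("task-1782202", "PowerOnVM_Task"))
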
[ 1857.162555] env[62525]: DEBUG nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1857.163412] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845d4a6d-6a2b-45da-9206-fbf975e9f498 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.214650] env[62525]: INFO nova.compute.resource_tracker [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating resource usage from migration 437fd501-04d0-4c8e-a038-403e281e8b8e [ 1857.223910] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.224182] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.252831] env[62525]: DEBUG nova.network.neutron [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Port 931ab189-c48d-469b-8776-5e4d3c8cf77a binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1857.386274] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e314413-163d-4b79-a3a5-c666ad8d9019 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.394339] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3c0087-ed25-44a2-bc7f-745f3dcf91f3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.428050] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6121d6d-3c95-498f-a4ce-2dc664099fd2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.436364] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c4caed-2bad-4482-8bd1-a29c576a7254 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.451945] env[62525]: DEBUG nova.compute.provider_tree [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 
tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1857.501152] env[62525]: DEBUG nova.compute.manager [req-3cc82062-d7df-4c83-829a-fa92704d2ad7 req-75c272ae-c6a4-4065-900f-5dafd5e47caf service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Received event network-vif-deleted-9fb5fc43-3369-489c-829b-506754512d51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1857.501397] env[62525]: INFO nova.compute.manager [req-3cc82062-d7df-4c83-829a-fa92704d2ad7 req-75c272ae-c6a4-4065-900f-5dafd5e47caf service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Neutron deleted interface 9fb5fc43-3369-489c-829b-506754512d51; detaching it from the instance and deleting it from the info cache [ 1857.501572] env[62525]: DEBUG nova.network.neutron [req-3cc82062-d7df-4c83-829a-fa92704d2ad7 req-75c272ae-c6a4-4065-900f-5dafd5e47caf service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.683470] env[62525]: INFO nova.compute.manager [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Took 18.72 seconds to build instance. [ 1857.728066] env[62525]: INFO nova.compute.manager [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Detaching volume d818f996-4266-47c4-ab1e-9827cfc22a7d [ 1857.761842] env[62525]: INFO nova.virt.block_device [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Attempting to driver detach volume d818f996-4266-47c4-ab1e-9827cfc22a7d from mountpoint /dev/sdb [ 1857.762127] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1857.762344] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1857.763560] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1e4672-99e6-445c-b68a-24be29872940 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.786275] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7a321f-cb12-4b5f-a52a-e13f9a600b67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.794694] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c19566-c96d-4f07-9946-77cb08afa272 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.817936] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce05a06-e498-455a-b551-2a3423e4cb61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.834935] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] The volume has not been displaced from its original location: [datastore1] volume-d818f996-4266-47c4-ab1e-9827cfc22a7d/volume-d818f996-4266-47c4-ab1e-9827cfc22a7d.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1857.840249] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1857.840849] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbceea8e-1a36-4f5d-8a9f-36a266ecd650 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.861352] env[62525]: DEBUG oslo_vmware.api [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1857.861352] env[62525]: value = "task-1782205" [ 1857.861352] env[62525]: _type = "Task" [ 1857.861352] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.869655] env[62525]: DEBUG oslo_vmware.api [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782205, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.955777] env[62525]: DEBUG nova.scheduler.client.report [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1857.977456] env[62525]: DEBUG nova.network.neutron [-] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.010886] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce6fd0e8-eb38-46af-91af-4c5675e97ea0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.018342] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6f9180-78c0-483b-8aec-1931741cfe81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.049079] env[62525]: DEBUG nova.compute.manager [req-3cc82062-d7df-4c83-829a-fa92704d2ad7 req-75c272ae-c6a4-4065-900f-5dafd5e47caf service nova] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Detach interface failed, port_id=9fb5fc43-3369-489c-829b-506754512d51, reason: Instance 69a1093a-95d7-4cbb-90bf-1a213470872a could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1858.185148] env[62525]: DEBUG oslo_concurrency.lockutils [None req-748a6d24-aa47-4e4d-81fa-6d0003922228 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.582s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.274641] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.274856] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.275038] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.335486] env[62525]: DEBUG nova.compute.manager [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Received event network-changed-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1858.335816] env[62525]: DEBUG nova.compute.manager [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Refreshing instance network info cache due to event network-changed-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1858.335882] env[62525]: DEBUG oslo_concurrency.lockutils [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] Acquiring lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.336039] env[62525]: DEBUG oslo_concurrency.lockutils [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] Acquired lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.336231] env[62525]: DEBUG nova.network.neutron [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Refreshing network info cache for port c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1858.373370] env[62525]: DEBUG oslo_vmware.api [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782205, 'name': ReconfigVM_Task, 'duration_secs': 0.227006} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.373684] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1858.378277] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1f4e225-3e03-452b-aab4-acdb6b2f2fb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.393726] env[62525]: DEBUG oslo_vmware.api [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1858.393726] env[62525]: value = "task-1782206" [ 1858.393726] env[62525]: _type = "Task" [ 1858.393726] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.401681] env[62525]: DEBUG oslo_vmware.api [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782206, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.408555] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.408951] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.461324] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.260s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.461609] env[62525]: INFO nova.compute.manager [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Migrating [ 1858.471905] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.749s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.474465] env[62525]: INFO nova.compute.claims [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1858.481614] env[62525]: INFO nova.compute.manager [-] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Took 1.39 seconds to deallocate network for instance. [ 1858.904041] env[62525]: DEBUG oslo_vmware.api [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782206, 'name': ReconfigVM_Task, 'duration_secs': 0.206252} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.904422] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369833', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'name': 'volume-d818f996-4266-47c4-ab1e-9827cfc22a7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '50ee564d-7b27-4bc4-a95e-7717de865cfb', 'attached_at': '', 'detached_at': '', 'volume_id': 'd818f996-4266-47c4-ab1e-9827cfc22a7d', 'serial': 'd818f996-4266-47c4-ab1e-9827cfc22a7d'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1858.911667] env[62525]: DEBUG nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1858.946833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "13020656-4e4f-40ee-a77a-fd64ae340e09" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.946833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.946833] env[62525]: INFO nova.compute.manager [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Rebooting instance [ 1858.991428] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.991624] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.991852] env[62525]: DEBUG nova.network.neutron [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Building network info cache for instance 
{{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1858.996762] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.073344] env[62525]: DEBUG nova.network.neutron [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updated VIF entry in instance network info cache for port c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1859.073743] env[62525]: DEBUG nova.network.neutron [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updating instance_info_cache with network_info: [{"id": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "address": "fa:16:3e:d6:85:91", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4fbe07a-10", "ovs_interfaceid": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.311631] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.311813] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.311994] env[62525]: DEBUG nova.network.neutron [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.436141] env[62525]: 
DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.455801] env[62525]: DEBUG nova.objects.instance [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'flavor' on Instance uuid 50ee564d-7b27-4bc4-a95e-7717de865cfb {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1859.463566] env[62525]: DEBUG oslo_concurrency.lockutils [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.576686] env[62525]: DEBUG oslo_concurrency.lockutils [req-1bab8113-9156-4a77-8382-afd70eba41c2 req-3a8b96ff-122f-47fe-ae2f-2ad16ebc1afe service nova] Releasing lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.577135] env[62525]: DEBUG oslo_concurrency.lockutils [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquired lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.577334] env[62525]: DEBUG nova.network.neutron [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.661681] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8362cb-cf1d-4cd1-8ec0-3f32dea9f17e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.670195] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e648ef7-1d22-4061-a338-c041746be3a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.701235] env[62525]: DEBUG nova.network.neutron [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance_info_cache with network_info: [{"id": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "address": "fa:16:3e:cb:82:81", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd060e85e-b0", "ovs_interfaceid": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.703563] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541baa8c-89aa-4247-ae0b-995438b3392b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.712204] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85084f27-659f-447a-b605-c29fb9ef1f52 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.726277] env[62525]: DEBUG nova.compute.provider_tree [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1860.024863] env[62525]: DEBUG nova.network.neutron [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.204262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 
tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.229054] env[62525]: DEBUG nova.scheduler.client.report [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1860.277014] env[62525]: DEBUG nova.network.neutron [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updating instance_info_cache with network_info: [{"id": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "address": "fa:16:3e:d6:85:91", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4fbe07a-10", "ovs_interfaceid": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.462288] env[62525]: DEBUG oslo_concurrency.lockutils [None req-c1c557eb-075e-4453-9645-7a802fed6bd4 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.238s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.527363] env[62525]: DEBUG oslo_concurrency.lockutils [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.595482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 
tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.595734] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.595940] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "50ee564d-7b27-4bc4-a95e-7717de865cfb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.596141] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.596665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.598262] env[62525]: INFO nova.compute.manager [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Terminating instance [ 1860.600013] env[62525]: DEBUG nova.compute.manager [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1860.600292] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1860.601132] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37b91f8-86eb-4680-905a-dead3e6e8df9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.609326] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1860.609552] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3949d5c5-e284-45fd-9307-e7c3ca6e37a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.615508] env[62525]: DEBUG oslo_vmware.api [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1860.615508] env[62525]: value = "task-1782207" [ 1860.615508] env[62525]: _type = "Task" [ 1860.615508] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.623223] env[62525]: DEBUG oslo_vmware.api [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.735533] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.736152] env[62525]: DEBUG nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1860.738987] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.742s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.739209] env[62525]: DEBUG nova.objects.instance [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lazy-loading 'resources' on Instance uuid 69a1093a-95d7-4cbb-90bf-1a213470872a {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.779367] env[62525]: DEBUG oslo_concurrency.lockutils [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Releasing lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.781444] env[62525]: DEBUG nova.compute.manager [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1860.782401] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc4ed25-923f-4a8b-8f3b-7b4fcdae917b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.048143] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded501dc-ddd3-4b93-9a66-d24d0decf1c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.068756] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11228c6-eff6-4039-a06e-217f195bd962 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.075788] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance 'e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1861.125497] env[62525]: DEBUG oslo_vmware.api [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782207, 'name': PowerOffVM_Task, 'duration_secs': 0.200952} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.125755] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1861.125920] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1861.126181] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6f19335-2adf-486f-ac99-7a4dc6f0a2dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.197733] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1861.197958] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1861.198158] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleting the datastore file [datastore1] 50ee564d-7b27-4bc4-a95e-7717de865cfb {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1861.198469] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c10ec2db-cd78-4077-9873-78f5f0f4e485 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.205044] env[62525]: DEBUG oslo_vmware.api [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1861.205044] env[62525]: value = "task-1782209" [ 1861.205044] env[62525]: _type = "Task" [ 1861.205044] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.216911] env[62525]: DEBUG oslo_vmware.api [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782209, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.241728] env[62525]: DEBUG nova.compute.utils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1861.246115] env[62525]: DEBUG nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1861.246289] env[62525]: DEBUG nova.network.neutron [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1861.326785] env[62525]: DEBUG nova.policy [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59a07073c7534d17bfb2013552bbe0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c56f465d1a641a99458904c04137621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1861.408859] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c9ed68-14dc-430a-b117-86cf467d9aba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.416777] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f269af-a77f-4daf-b970-302871c7380e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.447991] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673685cf-da22-42e6-b5b0-8bbc9d533b37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.455658] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1c141a-6052-4391-abc7-598867de1258 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.469327] env[62525]: DEBUG nova.compute.provider_tree [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.584476] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 
e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1861.585888] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50ff3e42-9e00-45ea-860d-d07281ade9a9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.593026] env[62525]: DEBUG nova.network.neutron [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Successfully created port: f44dec54-aad8-44f9-a6d0-8a6985797a65 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1861.595735] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1861.595735] env[62525]: value = "task-1782210" [ 1861.595735] env[62525]: _type = "Task" [ 1861.595735] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.605253] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782210, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.716483] env[62525]: DEBUG oslo_vmware.api [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131741} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.716760] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1861.716959] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1861.717161] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1861.717350] env[62525]: INFO nova.compute.manager [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1861.717612] env[62525]: DEBUG oslo.service.loopingcall [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.719810] env[62525]: DEBUG nova.compute.manager [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1861.719919] env[62525]: DEBUG nova.network.neutron [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1861.722520] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394f8880-0f23-4574-977a-f722f0461942 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.741660] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance 'c4e31de8-0b94-4fea-aa30-8af5608d257a' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1861.745172] env[62525]: DEBUG nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1861.799123] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a917f3eb-d8e1-45a0-8d53-d7964926492f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.807861] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Doing hard reboot of VM {{(pid=62525) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1861.808125] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-3a6fe9a5-3bbb-4e37-9a25-4806d9274fd8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.815299] env[62525]: DEBUG oslo_vmware.api [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1861.815299] env[62525]: value = "task-1782211" [ 1861.815299] env[62525]: _type = "Task" [ 1861.815299] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.823651] env[62525]: DEBUG oslo_vmware.api [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782211, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.972665] env[62525]: DEBUG nova.scheduler.client.report [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1862.106875] env[62525]: DEBUG oslo_vmware.api [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782210, 'name': PowerOnVM_Task, 'duration_secs': 0.383346} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.107142] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1862.107287] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-65d49471-d4d2-4eea-be67-f66913a25c75 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance 'e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1862.249966] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1862.250402] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7db41395-1ebc-428c-a8c9-42e5f47e3de3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.262141] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1862.262141] env[62525]: value = "task-1782212" [ 1862.262141] env[62525]: _type = "Task" [ 1862.262141] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.270835] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782212, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.325463] env[62525]: DEBUG oslo_vmware.api [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782211, 'name': ResetVM_Task, 'duration_secs': 0.10778} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.326187] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Did hard reboot of VM {{(pid=62525) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1862.326432] env[62525]: DEBUG nova.compute.manager [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1862.327248] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09588b4-30de-4207-9f18-692ddc618293 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.478704] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.480914] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.045s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.482543] env[62525]: INFO nova.compute.claims [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1862.488780] env[62525]: DEBUG nova.compute.manager [req-7a8760b4-cfd6-4c94-b91c-70a5adca2e3f req-3164a490-683b-41cd-8d23-de00483a509a service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Received event network-vif-deleted-d205d712-e184-43b0-93aa-3e45e7674f76 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1862.488780] env[62525]: INFO nova.compute.manager [req-7a8760b4-cfd6-4c94-b91c-70a5adca2e3f req-3164a490-683b-41cd-8d23-de00483a509a service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Neutron deleted interface d205d712-e184-43b0-93aa-3e45e7674f76; detaching it from the instance and deleting it from the info cache [ 1862.488780] env[62525]: DEBUG nova.network.neutron [req-7a8760b4-cfd6-4c94-b91c-70a5adca2e3f req-3164a490-683b-41cd-8d23-de00483a509a service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updating 
instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.505973] env[62525]: INFO nova.scheduler.client.report [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Deleted allocations for instance 69a1093a-95d7-4cbb-90bf-1a213470872a [ 1862.760027] env[62525]: DEBUG nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1862.772184] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782212, 'name': PowerOffVM_Task, 'duration_secs': 0.210381} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.772447] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1862.772639] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance 'c4e31de8-0b94-4fea-aa30-8af5608d257a' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1862.785044] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1862.785500] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1862.785674] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image limits 0:0:0 {{(pid=62525) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1862.785859] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1862.786018] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1862.786263] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1862.786386] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1862.786544] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1862.786709] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1862.786870] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1862.787052] env[62525]: DEBUG nova.virt.hardware [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1862.787866] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4ce2fc-ecca-4cf6-abd2-a4447d772734 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.795614] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586dd7c8-ac2c-45a8-a45f-52356370a800 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.838776] env[62525]: DEBUG oslo_concurrency.lockutils [None req-52d3f9c5-acf3-4478-9fea-26da898fe035 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09" "released" by 
"nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.892s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.965438] env[62525]: DEBUG nova.network.neutron [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.990992] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff856173-e7b3-4980-b7df-b6c0d50a3e32 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.002026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c05e68-39cd-4f2f-835c-a72920e6fdac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.016823] env[62525]: DEBUG oslo_concurrency.lockutils [None req-19f7841a-8cb4-4427-9042-e7f3f70f0653 tempest-ServerRescueTestJSON-367709214 tempest-ServerRescueTestJSON-367709214-project-member] Lock "69a1093a-95d7-4cbb-90bf-1a213470872a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.893s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.036266] env[62525]: DEBUG nova.compute.manager [req-7a8760b4-cfd6-4c94-b91c-70a5adca2e3f req-3164a490-683b-41cd-8d23-de00483a509a service nova] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Detach interface failed, port_id=d205d712-e184-43b0-93aa-3e45e7674f76, reason: Instance 50ee564d-7b27-4bc4-a95e-7717de865cfb could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1863.198593] env[62525]: DEBUG nova.network.neutron [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Successfully updated port: f44dec54-aad8-44f9-a6d0-8a6985797a65 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1863.284769] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1863.285120] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1863.285358] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1863.285620] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1863.285862] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1863.286077] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1863.286424] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1863.286562] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 
tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1863.286785] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1863.287036] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1863.287261] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1863.293290] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2314127-cf2f-40c6-8133-19b93d2ae8b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.310500] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1863.310500] env[62525]: value = "task-1782213" [ 1863.310500] env[62525]: _type = "Task" [ 1863.310500] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.323586] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782213, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.401400] env[62525]: DEBUG nova.compute.manager [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Received event network-changed-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1863.401621] env[62525]: DEBUG nova.compute.manager [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Refreshing instance network info cache due to event network-changed-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1863.401843] env[62525]: DEBUG oslo_concurrency.lockutils [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] Acquiring lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.402150] env[62525]: DEBUG oslo_concurrency.lockutils [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] Acquired lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.402905] env[62525]: DEBUG nova.network.neutron [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Refreshing network info cache for port c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1863.468553] env[62525]: INFO nova.compute.manager [-] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Took 1.75 seconds to deallocate network for instance. [ 1863.668141] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709f83ba-dfe9-4642-8000-6d74efd58061 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.676371] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d527609b-7d19-4c0b-b9e5-c1588e2650e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.710962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "refresh_cache-52341e2f-b556-4f84-b60e-16a3e71df504" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.711182] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "refresh_cache-52341e2f-b556-4f84-b60e-16a3e71df504" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.711405] env[62525]: DEBUG nova.network.neutron [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1863.713316] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0db8411-2654-4d4f-a315-8301d0bdd3fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.722182] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef961ac-0e39-4aab-a637-ffe3de0944eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.738929] env[62525]: DEBUG nova.compute.provider_tree [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 
tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1863.821930] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782213, 'name': ReconfigVM_Task, 'duration_secs': 0.15165} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.822303] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance 'c4e31de8-0b94-4fea-aa30-8af5608d257a' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1863.976155] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.033076] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "13020656-4e4f-40ee-a77a-fd64ae340e09" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.033319] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.033541] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "13020656-4e4f-40ee-a77a-fd64ae340e09-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.033835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.034026] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 
tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.036297] env[62525]: INFO nova.compute.manager [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Terminating instance [ 1864.038055] env[62525]: DEBUG nova.compute.manager [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1864.038249] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1864.039124] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183070f4-d5b5-45b5-8041-eb47ecec2846 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.050343] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1864.050583] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-333a78d2-7a65-44a9-ab76-69d9435589d5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.058170] env[62525]: DEBUG oslo_vmware.api [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1864.058170] env[62525]: value = "task-1782214" [ 1864.058170] env[62525]: _type = "Task" [ 1864.058170] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.068246] env[62525]: DEBUG oslo_vmware.api [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782214, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.242394] env[62525]: DEBUG nova.scheduler.client.report [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1864.277112] env[62525]: DEBUG nova.network.neutron [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1864.301436] env[62525]: DEBUG nova.network.neutron [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updated VIF entry in instance network info cache for port c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1864.301824] env[62525]: DEBUG nova.network.neutron [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updating instance_info_cache with network_info: [{"id": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "address": "fa:16:3e:d6:85:91", "network": {"id": "924cfba6-f086-48eb-8cee-ccb6857b7962", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1429831995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea75b422b034b2b8bc55de69766ba75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4fbe07a-10", "ovs_interfaceid": "c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.328915] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1864.329157] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1864.329382] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1864.329569] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1864.329722] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1864.329869] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1864.330425] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1864.330668] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1864.330841] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1864.331014] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 
tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1864.331203] env[62525]: DEBUG nova.virt.hardware [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1864.336550] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1864.337129] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e479b82e-6e53-4e3a-938a-f035ea4edd53 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.359930] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1864.359930] env[62525]: value = "task-1782215" [ 1864.359930] env[62525]: _type = "Task" [ 1864.359930] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.368347] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782215, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.520150] env[62525]: DEBUG nova.compute.manager [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Received event network-vif-plugged-f44dec54-aad8-44f9-a6d0-8a6985797a65 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1864.520380] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] Acquiring lock "52341e2f-b556-4f84-b60e-16a3e71df504-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.520603] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] Lock "52341e2f-b556-4f84-b60e-16a3e71df504-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.520822] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] Lock "52341e2f-b556-4f84-b60e-16a3e71df504-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.521016] env[62525]: DEBUG nova.compute.manager [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] No waiting events found dispatching network-vif-plugged-f44dec54-aad8-44f9-a6d0-8a6985797a65 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1864.521186] env[62525]: WARNING nova.compute.manager [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Received unexpected event network-vif-plugged-f44dec54-aad8-44f9-a6d0-8a6985797a65 for instance with vm_state building and task_state spawning. [ 1864.521450] env[62525]: DEBUG nova.compute.manager [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Received event network-changed-f44dec54-aad8-44f9-a6d0-8a6985797a65 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1864.521590] env[62525]: DEBUG nova.compute.manager [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Refreshing instance network info cache due to event network-changed-f44dec54-aad8-44f9-a6d0-8a6985797a65. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1864.521766] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] Acquiring lock "refresh_cache-52341e2f-b556-4f84-b60e-16a3e71df504" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.568725] env[62525]: DEBUG oslo_vmware.api [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782214, 'name': PowerOffVM_Task, 'duration_secs': 0.238093} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.568997] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1864.569248] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1864.569470] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8fca5a05-aef1-4ca8-94cf-63a72294e53e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.573737] env[62525]: DEBUG nova.network.neutron [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Port 931ab189-c48d-469b-8776-5e4d3c8cf77a binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1864.573988] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.574160] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.574386] env[62525]: DEBUG nova.network.neutron [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1864.576555] env[62525]: DEBUG nova.network.neutron [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 
52341e2f-b556-4f84-b60e-16a3e71df504] Updating instance_info_cache with network_info: [{"id": "f44dec54-aad8-44f9-a6d0-8a6985797a65", "address": "fa:16:3e:bb:80:a5", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf44dec54-aa", "ovs_interfaceid": "f44dec54-aad8-44f9-a6d0-8a6985797a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.728826] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1864.729290] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1864.730067] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Deleting the datastore file [datastore1] 13020656-4e4f-40ee-a77a-fd64ae340e09 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1864.730067] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53e22feb-cdfa-4daa-ae9c-2ed239ab123e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.742075] env[62525]: DEBUG oslo_vmware.api [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1864.742075] env[62525]: value = "task-1782217" [ 1864.742075] env[62525]: _type = "Task" [ 1864.742075] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.748266] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.748794] env[62525]: DEBUG nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1864.751518] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.776s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.751741] env[62525]: DEBUG nova.objects.instance [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'resources' on Instance uuid 50ee564d-7b27-4bc4-a95e-7717de865cfb {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1864.757378] env[62525]: DEBUG oslo_vmware.api [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.810455] env[62525]: DEBUG oslo_concurrency.lockutils [req-a4d1da89-5d22-4094-937f-4b2f68468fb5 req-18bbe915-bffe-4b33-9010-a1bf7249f6be service nova] Releasing lock "refresh_cache-13020656-4e4f-40ee-a77a-fd64ae340e09" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.869789] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782215, 'name': ReconfigVM_Task, 'duration_secs': 0.310346} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.870122] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1864.870891] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23868f7a-7672-4f8f-af06-0e029470de5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.893989] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] c4e31de8-0b94-4fea-aa30-8af5608d257a/c4e31de8-0b94-4fea-aa30-8af5608d257a.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1864.894300] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f82698ef-f064-4bfd-9ba8-e56ce9265c3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.912396] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1864.912396] env[62525]: value = "task-1782218" [ 1864.912396] env[62525]: _type = "Task" [ 1864.912396] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.920037] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782218, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.082118] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "refresh_cache-52341e2f-b556-4f84-b60e-16a3e71df504" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.082118] env[62525]: DEBUG nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Instance network_info: |[{"id": "f44dec54-aad8-44f9-a6d0-8a6985797a65", "address": "fa:16:3e:bb:80:a5", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf44dec54-aa", "ovs_interfaceid": "f44dec54-aad8-44f9-a6d0-8a6985797a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1865.082118] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] Acquired lock "refresh_cache-52341e2f-b556-4f84-b60e-16a3e71df504" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.082118] env[62525]: DEBUG nova.network.neutron [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Refreshing network info cache for port f44dec54-aad8-44f9-a6d0-8a6985797a65 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1865.083655] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:80:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f44dec54-aad8-44f9-a6d0-8a6985797a65', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1865.093994] env[62525]: DEBUG oslo.service.loopingcall [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1865.097908] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1865.098440] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-baaf2bb2-d35a-4a86-a52e-e09cf521580c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.125806] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1865.125806] env[62525]: value = "task-1782219" [ 1865.125806] env[62525]: _type = "Task" [ 1865.125806] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.135834] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782219, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.255080] env[62525]: DEBUG nova.compute.utils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1865.258845] env[62525]: DEBUG oslo_vmware.api [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139333} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.263685] env[62525]: DEBUG nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1865.263685] env[62525]: DEBUG nova.network.neutron [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1865.264248] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1865.264671] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1865.265369] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1865.265369] env[62525]: INFO nova.compute.manager [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1865.265561] env[62525]: DEBUG oslo.service.loopingcall [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1865.267792] env[62525]: DEBUG nova.compute.manager [-] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1865.267938] env[62525]: DEBUG nova.network.neutron [-] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1865.380889] env[62525]: DEBUG nova.policy [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '391b41cf09fd42879d3f5cd3153c2045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a46df22dac6f473b8395f9302c3a4a75', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1865.423586] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.435342] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7496cb1-e25c-4274-8e1e-dbf01b6bb083 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.442816] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46d35a4-4834-47c2-adca-27c5b15ca3f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.485908] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf249c0-c2a0-4337-8dc4-4f751b7a4581 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.498108] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f8fe63-d5fb-4099-ae8f-b79626bcf733 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.512296] env[62525]: DEBUG nova.compute.provider_tree [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.573969] env[62525]: DEBUG nova.network.neutron [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": 
"3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.583520] env[62525]: DEBUG nova.network.neutron [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Updated VIF entry in instance network info cache for port f44dec54-aad8-44f9-a6d0-8a6985797a65. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1865.583520] env[62525]: DEBUG nova.network.neutron [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Updating instance_info_cache with network_info: [{"id": "f44dec54-aad8-44f9-a6d0-8a6985797a65", "address": "fa:16:3e:bb:80:a5", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf44dec54-aa", "ovs_interfaceid": "f44dec54-aad8-44f9-a6d0-8a6985797a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.636649] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782219, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.644760] env[62525]: DEBUG nova.compute.manager [req-8f69ac73-3f9d-4b18-86a9-10180210848b req-8f868c1e-576c-41ec-bdf1-b47efede644e service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Received event network-vif-deleted-c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1865.645137] env[62525]: INFO nova.compute.manager [req-8f69ac73-3f9d-4b18-86a9-10180210848b req-8f868c1e-576c-41ec-bdf1-b47efede644e service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Neutron deleted interface c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772; detaching it from the instance and deleting it from the info cache [ 1865.645349] env[62525]: DEBUG nova.network.neutron [req-8f69ac73-3f9d-4b18-86a9-10180210848b req-8f868c1e-576c-41ec-bdf1-b47efede644e service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.764564] env[62525]: DEBUG nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1865.796446] env[62525]: DEBUG nova.network.neutron [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Successfully created port: 4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1865.923634] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782218, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.015521] env[62525]: DEBUG nova.scheduler.client.report [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1866.077512] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.080539] env[62525]: DEBUG oslo_concurrency.lockutils [req-d3eda636-d5aa-45a5-806f-a9e96db03847 req-c2454d15-a9a8-484e-80bc-c88a25f06382 service nova] Releasing lock "refresh_cache-52341e2f-b556-4f84-b60e-16a3e71df504" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.086112] env[62525]: DEBUG nova.network.neutron [-] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.141122] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782219, 'name': CreateVM_Task, 'duration_secs': 0.795365} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.142033] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1866.142144] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.142317] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.142682] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1866.142950] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3450caf4-b260-4ec8-b723-ca5b1ddcdb94 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.148091] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1866.148091] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52af8ae9-b1b3-cb91-664a-c83941c811c6" [ 1866.148091] env[62525]: _type = "Task" [ 1866.148091] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.148317] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d948ba9f-3bbb-4dc9-802d-a682d72a6ad7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.158745] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52af8ae9-b1b3-cb91-664a-c83941c811c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.162540] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c81c607-d90b-4d55-ae6b-94ab41e3647c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.192368] env[62525]: DEBUG nova.compute.manager [req-8f69ac73-3f9d-4b18-86a9-10180210848b req-8f868c1e-576c-41ec-bdf1-b47efede644e service nova] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Detach interface failed, port_id=c4fbe07a-10f5-4ce3-b24b-b4b49c7f4772, reason: Instance 13020656-4e4f-40ee-a77a-fd64ae340e09 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1866.423726] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782218, 'name': ReconfigVM_Task, 'duration_secs': 1.061881} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.424116] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Reconfigured VM instance instance-00000070 to attach disk [datastore1] c4e31de8-0b94-4fea-aa30-8af5608d257a/c4e31de8-0b94-4fea-aa30-8af5608d257a.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1866.424353] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance 'c4e31de8-0b94-4fea-aa30-8af5608d257a' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1866.521674] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.770s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.545019] env[62525]: INFO nova.scheduler.client.report [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted allocations for instance 50ee564d-7b27-4bc4-a95e-7717de865cfb [ 1866.580643] env[62525]: DEBUG nova.compute.manager [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62525) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1866.580845] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.581133] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.588258] env[62525]: INFO nova.compute.manager [-] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Took 1.32 seconds to deallocate network for instance. [ 1866.661261] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52af8ae9-b1b3-cb91-664a-c83941c811c6, 'name': SearchDatastore_Task, 'duration_secs': 0.014527} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.661599] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.662031] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1866.662243] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.662470] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.662757] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.663178] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b97175a-8be0-4d4b-9b01-2062bc0eff94 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.672256] env[62525]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.672442] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1866.673198] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27ea7e74-be98-49c9-a5ef-d77d69d377a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.678612] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1866.678612] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520bb829-1666-2e23-9eef-067bc36a6c8c" [ 1866.678612] env[62525]: _type = "Task" [ 1866.678612] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.687110] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520bb829-1666-2e23-9eef-067bc36a6c8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.773399] env[62525]: DEBUG nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1866.800205] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1866.800445] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1866.800606] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1866.800772] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1866.800943] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1866.801155] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1866.801401] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1866.801551] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1866.801723] 
env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1866.801886] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1866.802074] env[62525]: DEBUG nova.virt.hardware [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1866.802961] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1c4b4a-6ec4-47ed-8b4e-bb04cda3ceff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.811351] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adc2edb-9505-4b72-b56e-50a42bc36afe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.935561] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d43663-4e21-4f44-9c76-b343a8902833 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.994757] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdebdbef-de30-4d8d-895e-14191822bffc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.016436] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance 'c4e31de8-0b94-4fea-aa30-8af5608d257a' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1867.053887] env[62525]: DEBUG oslo_concurrency.lockutils [None req-bee84148-5684-46c2-b1a4-d3f595791acf tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "50ee564d-7b27-4bc4-a95e-7717de865cfb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.458s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.083967] env[62525]: DEBUG nova.objects.instance [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'migration_context' on Instance uuid e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1867.095192] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 
tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.189426] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520bb829-1666-2e23-9eef-067bc36a6c8c, 'name': SearchDatastore_Task, 'duration_secs': 0.010545} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.190170] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a294341-4acf-4963-96d3-ef0fee309713 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.195848] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1867.195848] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52c1243a-787f-74c0-408d-eba6aa528f2d" [ 1867.195848] env[62525]: _type = "Task" [ 1867.195848] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.204092] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c1243a-787f-74c0-408d-eba6aa528f2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.239929] env[62525]: DEBUG nova.compute.manager [req-96ef465f-9ac8-4c35-8d3b-b126eb45c03c req-b5705471-cbee-4023-9357-b63b64931f1a service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Received event network-vif-plugged-4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1867.240170] env[62525]: DEBUG oslo_concurrency.lockutils [req-96ef465f-9ac8-4c35-8d3b-b126eb45c03c req-b5705471-cbee-4023-9357-b63b64931f1a service nova] Acquiring lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.240374] env[62525]: DEBUG oslo_concurrency.lockutils [req-96ef465f-9ac8-4c35-8d3b-b126eb45c03c req-b5705471-cbee-4023-9357-b63b64931f1a service nova] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.240541] env[62525]: DEBUG oslo_concurrency.lockutils [req-96ef465f-9ac8-4c35-8d3b-b126eb45c03c req-b5705471-cbee-4023-9357-b63b64931f1a service nova] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.240877] env[62525]: DEBUG nova.compute.manager [req-96ef465f-9ac8-4c35-8d3b-b126eb45c03c req-b5705471-cbee-4023-9357-b63b64931f1a service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] No waiting events found dispatching network-vif-plugged-4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1867.241115] env[62525]: WARNING nova.compute.manager [req-96ef465f-9ac8-4c35-8d3b-b126eb45c03c req-b5705471-cbee-4023-9357-b63b64931f1a service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Received unexpected event network-vif-plugged-4f9698c8-b319-4565-b308-ac2450ee865f for instance with vm_state building and task_state spawning. 
[ 1867.325524] env[62525]: DEBUG nova.network.neutron [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Successfully updated port: 4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1867.609919] env[62525]: DEBUG nova.network.neutron [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Port d060e85e-b0a9-45db-8fb4-2994f45e01f5 binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1867.708889] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52c1243a-787f-74c0-408d-eba6aa528f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.011973} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.709189] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.709448] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 52341e2f-b556-4f84-b60e-16a3e71df504/52341e2f-b556-4f84-b60e-16a3e71df504.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1867.709707] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3919a656-e93a-4f90-98cd-65061a555c18 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.718526] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1867.718526] env[62525]: value = "task-1782220" [ 1867.718526] env[62525]: _type = "Task" [ 1867.718526] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.730773] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782220, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.735533] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0e0985-247a-4375-ac7e-d8b2e599bc4c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.742303] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b8f419-4660-46ec-a5f3-d21ecc5c5cd6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.775328] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddd9536-ed0a-4bc2-aebb-d1935640dd2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.783137] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e2914b-f497-41c0-b23e-a0ff648f2adb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.797486] env[62525]: DEBUG nova.compute.provider_tree [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1867.828330] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.828473] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.828616] env[62525]: DEBUG nova.network.neutron [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1868.229401] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43995} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.229732] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 52341e2f-b556-4f84-b60e-16a3e71df504/52341e2f-b556-4f84-b60e-16a3e71df504.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1868.229988] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1868.230291] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-905c23fc-fc30-4b92-a5d9-31561cb3fd8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.237432] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1868.237432] env[62525]: value = "task-1782221" [ 1868.237432] env[62525]: _type = "Task" [ 1868.237432] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.244864] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782221, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.300992] env[62525]: DEBUG nova.scheduler.client.report [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1868.357206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.357384] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.368898] env[62525]: DEBUG nova.network.neutron [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1868.503680] env[62525]: DEBUG nova.network.neutron [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updating instance_info_cache with network_info: [{"id": "4f9698c8-b319-4565-b308-ac2450ee865f", "address": "fa:16:3e:4d:c3:e8", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f9698c8-b3", "ovs_interfaceid": "4f9698c8-b319-4565-b308-ac2450ee865f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.628851] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.629098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.629098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.746718] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063657} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.746995] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1868.747780] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975fcfa9-9661-4fa6-a9d4-7bc798de18e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.770821] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 52341e2f-b556-4f84-b60e-16a3e71df504/52341e2f-b556-4f84-b60e-16a3e71df504.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1868.771425] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9ecfc69-752a-40e6-916e-f2fa6ef10f66 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.791800] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1868.791800] env[62525]: value = "task-1782222" [ 1868.791800] env[62525]: _type = "Task" [ 1868.791800] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.799771] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782222, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.862143] env[62525]: DEBUG nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1869.006328] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.006699] env[62525]: DEBUG nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Instance network_info: |[{"id": "4f9698c8-b319-4565-b308-ac2450ee865f", "address": "fa:16:3e:4d:c3:e8", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f9698c8-b3", "ovs_interfaceid": "4f9698c8-b319-4565-b308-ac2450ee865f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1869.007174] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:c3:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afd3feb3-ffcc-4499-a2c2-eb6a48aefde9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f9698c8-b319-4565-b308-ac2450ee865f', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1869.015635] env[62525]: DEBUG oslo.service.loopingcall [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.015988] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1869.016329] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1ef301b-9f9c-49ee-a94a-12c612136d4f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.042225] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1869.042225] env[62525]: value = "task-1782223" [ 1869.042225] env[62525]: _type = "Task" [ 1869.042225] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.051702] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782223, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.303956] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782222, 'name': ReconfigVM_Task, 'duration_secs': 0.305716} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.304936] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 52341e2f-b556-4f84-b60e-16a3e71df504/52341e2f-b556-4f84-b60e-16a3e71df504.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1869.305772] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e6a3794-3a97-46d6-97f7-4af9ba6daeea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.311502] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.730s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.318716] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.224s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.319239] env[62525]: DEBUG nova.objects.instance [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lazy-loading 'resources' on Instance uuid 13020656-4e4f-40ee-a77a-fd64ae340e09 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.323928] env[62525]: DEBUG oslo_vmware.api 
[None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1869.323928] env[62525]: value = "task-1782224" [ 1869.323928] env[62525]: _type = "Task" [ 1869.323928] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.342787] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782224, 'name': Rename_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.375701] env[62525]: DEBUG nova.compute.manager [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Received event network-changed-4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1869.375960] env[62525]: DEBUG nova.compute.manager [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Refreshing instance network info cache due to event network-changed-4f9698c8-b319-4565-b308-ac2450ee865f. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1869.376289] env[62525]: DEBUG oslo_concurrency.lockutils [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] Acquiring lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.376512] env[62525]: DEBUG oslo_concurrency.lockutils [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] Acquired lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.377237] env[62525]: DEBUG nova.network.neutron [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Refreshing network info cache for port 4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1869.389761] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.538352] env[62525]: DEBUG oslo_concurrency.lockutils [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.538632] env[62525]: DEBUG oslo_concurrency.lockutils [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 
tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.551827] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782223, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.667539] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.667811] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.668029] env[62525]: DEBUG nova.network.neutron [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1869.835420] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782224, 'name': Rename_Task, 'duration_secs': 0.140134} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.835695] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1869.835939] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e888fa2f-ff14-4cd5-8472-512e0c312bdb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.842753] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1869.842753] env[62525]: value = "task-1782225" [ 1869.842753] env[62525]: _type = "Task" [ 1869.842753] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.851235] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782225, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.970274] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f41920-6b83-493b-bd84-1d45f24ef8e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.982544] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaaf7ce-7c3a-4c37-94f0-897a5aeb66af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.017294] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5167f2-d777-4673-9833-5964ed73c622 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.024897] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3216dc2d-dba5-4b08-a684-7c5d3b9f463a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.037920] env[62525]: DEBUG nova.compute.provider_tree [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.041133] env[62525]: INFO nova.compute.manager [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Detaching volume aa014b16-de19-45f8-9702-f93bf9cafd8f [ 1870.053765] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782223, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.085028] env[62525]: INFO nova.virt.block_device [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Attempting to driver detach volume aa014b16-de19-45f8-9702-f93bf9cafd8f from mountpoint /dev/sdb [ 1870.085028] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1870.085028] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369843', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'name': 'volume-aa014b16-de19-45f8-9702-f93bf9cafd8f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'cb043ab8-dff7-48c6-b50b-a4d77a01eb41', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'serial': 'aa014b16-de19-45f8-9702-f93bf9cafd8f'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1870.085028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca1a562-f814-4003-9f1a-81a8708e88d1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.111248] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c58d249-f4e9-44c0-91d7-072a646c6b82 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.118455] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f8ec29-4510-4b59-a249-27b2fff4a78c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.140246] env[62525]: DEBUG nova.network.neutron [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updated VIF entry in instance network info cache for port 4f9698c8-b319-4565-b308-ac2450ee865f. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1870.140609] env[62525]: DEBUG nova.network.neutron [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updating instance_info_cache with network_info: [{"id": "4f9698c8-b319-4565-b308-ac2450ee865f", "address": "fa:16:3e:4d:c3:e8", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f9698c8-b3", "ovs_interfaceid": "4f9698c8-b319-4565-b308-ac2450ee865f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.142335] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91de8464-ccca-4dfe-8b70-a14ad9523982 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.159103] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] The volume has not been displaced from its original location: [datastore1] volume-aa014b16-de19-45f8-9702-f93bf9cafd8f/volume-aa014b16-de19-45f8-9702-f93bf9cafd8f.vmdk. No consolidation needed. {{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1870.164480] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfiguring VM instance instance-0000004a to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1870.165378] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01c842ee-aa45-4cb7-be50-1a6fcbff7af6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.185796] env[62525]: DEBUG oslo_vmware.api [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1870.185796] env[62525]: value = "task-1782226" [ 1870.185796] env[62525]: _type = "Task" [ 1870.185796] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.193822] env[62525]: DEBUG oslo_vmware.api [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.354110] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782225, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.381674] env[62525]: DEBUG nova.network.neutron [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance_info_cache with network_info: [{"id": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "address": "fa:16:3e:cb:82:81", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd060e85e-b0", "ovs_interfaceid": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.541067] env[62525]: DEBUG nova.scheduler.client.report [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1870.554071] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782223, 'name': CreateVM_Task, 'duration_secs': 1.3922} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.554071] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1870.561713] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.561891] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.562237] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1870.562741] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06b70183-385f-48a6-bd8d-2a3afe41bfd3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.568203] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1870.568203] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]529cdf24-5ea3-b84c-6155-402307c9fa69" [ 1870.568203] env[62525]: _type = "Task" [ 1870.568203] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.575910] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529cdf24-5ea3-b84c-6155-402307c9fa69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.645783] env[62525]: DEBUG oslo_concurrency.lockutils [req-5729e669-85da-471d-b3d3-8b329f7b7a49 req-1252bbb7-4041-4b81-87de-0fa6353760a2 service nova] Releasing lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.696156] env[62525]: DEBUG oslo_vmware.api [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782226, 'name': ReconfigVM_Task, 'duration_secs': 0.212835} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.696451] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Reconfigured VM instance instance-0000004a to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1870.701983] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc725a47-d63a-4d8a-8d42-06b7ff44f4c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.717960] env[62525]: DEBUG oslo_vmware.api [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1870.717960] env[62525]: value = "task-1782227" [ 1870.717960] env[62525]: _type = "Task" [ 1870.717960] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.726269] env[62525]: DEBUG oslo_vmware.api [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782227, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.856383] env[62525]: DEBUG oslo_vmware.api [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782225, 'name': PowerOnVM_Task, 'duration_secs': 0.625039} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.856735] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1870.857054] env[62525]: INFO nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Took 8.10 seconds to spawn the instance on the hypervisor. 
[ 1870.857201] env[62525]: DEBUG nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1870.858026] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d9bca5-3eb9-4df6-a3e3-37605edea533 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.871676] env[62525]: INFO nova.compute.manager [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Swapping old allocation on dict_keys(['bb89c0ac-8f56-43c6-9f73-fd897be63424']) held by migration 0efe3fee-d92f-4629-8cc2-120967622612 for instance [ 1870.885179] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.894937] env[62525]: DEBUG nova.scheduler.client.report [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Overwriting current allocation {'allocations': {'bb89c0ac-8f56-43c6-9f73-fd897be63424': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 158}}, 'project_id': '3900af0b29fa40beb95a4260054c8e5b', 'user_id': 'b6f6e065dce947b2a31313b33a08132c', 'consumer_generation': 1} on consumer e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0 {{(pid=62525) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1870.976892] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.977153] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.977366] env[62525]: DEBUG nova.network.neutron [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1871.049631] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.051960] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.662s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.053450] env[62525]: INFO nova.compute.claims [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1871.066958] env[62525]: INFO nova.scheduler.client.report [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Deleted allocations for instance 13020656-4e4f-40ee-a77a-fd64ae340e09 [ 1871.081113] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]529cdf24-5ea3-b84c-6155-402307c9fa69, 'name': SearchDatastore_Task, 'duration_secs': 0.009605} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.081409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.081696] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1871.081933] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.082100] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.082281] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1871.082534] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57b0672c-a133-4c1d-a909-2a1ec9f49425 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.091483] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1871.091742] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1871.092454] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b3de85b-3343-410f-9ef9-150248fcfe6d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.098284] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1871.098284] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52def781-aa81-d328-9912-e0ed44468d72" [ 1871.098284] env[62525]: _type = "Task" [ 1871.098284] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.105968] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52def781-aa81-d328-9912-e0ed44468d72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.227846] env[62525]: DEBUG oslo_vmware.api [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782227, 'name': ReconfigVM_Task, 'duration_secs': 0.226165} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.228161] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369843', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'name': 'volume-aa014b16-de19-45f8-9702-f93bf9cafd8f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'cb043ab8-dff7-48c6-b50b-a4d77a01eb41', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa014b16-de19-45f8-9702-f93bf9cafd8f', 'serial': 'aa014b16-de19-45f8-9702-f93bf9cafd8f'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1871.376265] env[62525]: INFO nova.compute.manager [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Took 17.67 seconds to build instance. [ 1871.408551] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f3eb01-d2bd-4d31-8236-0db399b87ec1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.428820] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e6ed23-578a-44d7-a1fa-021e37cbbe72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.436383] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance 'c4e31de8-0b94-4fea-aa30-8af5608d257a' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1871.577933] env[62525]: DEBUG oslo_concurrency.lockutils [None req-eaf4f003-bee2-4f9a-a553-5b4032326fc7 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "13020656-4e4f-40ee-a77a-fd64ae340e09" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.544s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.611460] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52def781-aa81-d328-9912-e0ed44468d72, 'name': SearchDatastore_Task, 'duration_secs': 0.009053} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.612614] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e95dc85b-bae9-4a27-8abb-b813cde01c89 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.620872] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1871.620872] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]520bcdd1-91b6-6819-94f4-4f8d9df8b81a" [ 1871.620872] env[62525]: _type = "Task" [ 1871.620872] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.629719] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520bcdd1-91b6-6819-94f4-4f8d9df8b81a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.717901] env[62525]: DEBUG nova.network.neutron [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [{"id": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "address": "fa:16:3e:35:78:f0", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap931ab189-c4", "ovs_interfaceid": "931ab189-c48d-469b-8776-5e4d3c8cf77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.769310] env[62525]: DEBUG nova.objects.instance [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'flavor' on Instance uuid cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1871.878234] env[62525]: DEBUG oslo_concurrency.lockutils [None req-1fb439df-5a3e-4300-b855-65730533865b tempest-ServersTestJSON-1950281889 
tempest-ServersTestJSON-1950281889-project-member] Lock "52341e2f-b556-4f84-b60e-16a3e71df504" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.179s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.942810] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1871.943138] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50dd4e08-a07c-4aa8-b80d-d64223d17e37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.950812] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1871.950812] env[62525]: value = "task-1782228" [ 1871.950812] env[62525]: _type = "Task" [ 1871.950812] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.960463] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782228, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.108137] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "52341e2f-b556-4f84-b60e-16a3e71df504" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.108435] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "52341e2f-b556-4f84-b60e-16a3e71df504" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.108653] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "52341e2f-b556-4f84-b60e-16a3e71df504-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.108850] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "52341e2f-b556-4f84-b60e-16a3e71df504-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.109078] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "52341e2f-b556-4f84-b60e-16a3e71df504-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.111317] env[62525]: INFO nova.compute.manager [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Terminating instance [ 1872.113230] env[62525]: DEBUG nova.compute.manager [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1872.113420] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1872.114302] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd4b625-919f-4cfa-849e-8c2552490c70 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.122456] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1872.127620] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02b9c8ce-e661-4567-9d19-358cccc20e2e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.134820] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]520bcdd1-91b6-6819-94f4-4f8d9df8b81a, 'name': SearchDatastore_Task, 'duration_secs': 0.011428} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.135967] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.136244] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] fd078815-58e6-4a3a-9da8-dd5324ea76b8/fd078815-58e6-4a3a-9da8-dd5324ea76b8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1872.136634] env[62525]: DEBUG oslo_vmware.api [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1872.136634] env[62525]: value = "task-1782229" [ 1872.136634] env[62525]: _type = "Task" [ 1872.136634] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.136856] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33e302a9-c916-4295-ac06-19807a816532 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.149339] env[62525]: DEBUG oslo_vmware.api [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782229, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.150588] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1872.150588] env[62525]: value = "task-1782230" [ 1872.150588] env[62525]: _type = "Task" [ 1872.150588] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.158039] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782230, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.219471] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.220135] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1872.220394] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4280f4a-41ac-41d3-807c-b7cc1c99b1dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.223804] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b6950f-2f74-40cf-a4d6-3eb3853eb7e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.232273] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff35a9c8-86c9-428e-8e5d-fc7b1277540d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.236869] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1872.236869] env[62525]: value = "task-1782231" [ 1872.236869] env[62525]: _type = "Task" [ 1872.236869] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.272100] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80659f2-5705-4736-86d0-94620e40712f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.279917] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782231, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.287345] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fa8318-17c2-4e9c-bff3-4a6e42e3d2ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.306236] env[62525]: DEBUG nova.compute.provider_tree [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.461615] env[62525]: DEBUG oslo_vmware.api [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782228, 'name': PowerOnVM_Task, 'duration_secs': 0.493761} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.461891] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1872.462100] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca0c66f-1e1a-4c5d-8e4e-422c5adb3710 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance 'c4e31de8-0b94-4fea-aa30-8af5608d257a' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1872.648030] env[62525]: DEBUG oslo_vmware.api [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782229, 'name': PowerOffVM_Task, 'duration_secs': 0.174167} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.648321] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1872.648488] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1872.648761] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ded04818-1cd3-4261-8a02-0765c0c85a16 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.659245] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.417729} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.659473] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] fd078815-58e6-4a3a-9da8-dd5324ea76b8/fd078815-58e6-4a3a-9da8-dd5324ea76b8.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1872.659674] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1872.659897] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22ceb6f6-1706-4781-9f25-205836dcf2dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.666435] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1872.666435] env[62525]: value = "task-1782233" [ 1872.666435] env[62525]: _type = "Task" [ 1872.666435] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.674593] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782233, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.747018] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782231, 'name': PowerOffVM_Task, 'duration_secs': 0.190565} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.747331] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1872.747978] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1872.748216] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1872.748372] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1872.748556] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1872.748704] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1872.748850] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1872.749063] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1872.749228] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1872.749395] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1872.749562] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1872.749731] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1872.754722] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d963c837-9e23-4c16-9030-c7333c91e16d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.770790] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1872.770790] env[62525]: value = "task-1782234" [ 1872.770790] env[62525]: _type = "Task" [ 1872.770790] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.775024] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1872.775268] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1872.775471] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleting the datastore file [datastore1] 52341e2f-b556-4f84-b60e-16a3e71df504 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1872.776146] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f5d159e-479e-46b3-8359-bfb52500d9dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.781128] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782234, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.782935] env[62525]: DEBUG oslo_concurrency.lockutils [None req-06fb9204-a514-4ae3-8190-48bdb733b56b tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.244s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.785168] env[62525]: DEBUG oslo_vmware.api [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1872.785168] env[62525]: value = "task-1782235" [ 1872.785168] env[62525]: _type = "Task" [ 1872.785168] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.793136] env[62525]: DEBUG oslo_vmware.api [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782235, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.811014] env[62525]: DEBUG nova.scheduler.client.report [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1873.023040] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "42d14e44-44d6-46de-84e3-049a2d7e84f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.023322] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.023722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "42d14e44-44d6-46de-84e3-049a2d7e84f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.023722] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.023867] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.025908] env[62525]: INFO nova.compute.manager [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Terminating instance [ 1873.027616] env[62525]: DEBUG nova.compute.manager [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 
tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1873.027808] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1873.029198] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6e86ff-7390-4b47-ae6b-deda81caba42 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.041403] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1873.041657] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b92c944-32e0-4932-a0e7-d16b2b15714c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.047979] env[62525]: DEBUG oslo_vmware.api [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1873.047979] env[62525]: value = "task-1782236" [ 1873.047979] env[62525]: _type = "Task" [ 1873.047979] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.056454] env[62525]: DEBUG oslo_vmware.api [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782236, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.178096] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782233, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059467} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.178528] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1873.179435] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56631893-c667-4162-94da-687fd2d39ecc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.202751] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] fd078815-58e6-4a3a-9da8-dd5324ea76b8/fd078815-58e6-4a3a-9da8-dd5324ea76b8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1873.203098] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ada1fbc3-ae61-4950-b20a-a86773cfc5e2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.222224] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1873.222224] env[62525]: value = "task-1782237" [ 1873.222224] env[62525]: _type = "Task" [ 1873.222224] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.230212] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.281184] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782234, 'name': ReconfigVM_Task, 'duration_secs': 0.15586} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.282134] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9212b1-ca64-440d-bfe4-177b4ad5c6c3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.302928] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1873.303142] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1873.303301] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1873.303482] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1873.303628] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1873.303775] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1873.304011] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1873.304238] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1873.304346] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1873.304508] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1873.304681] env[62525]: DEBUG nova.virt.hardware [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1873.305915] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f731b46-91d8-4b09-98b9-f7dc3b308d61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.310995] env[62525]: DEBUG oslo_vmware.api [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157752} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.311561] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1873.311781] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1873.312040] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1873.312209] env[62525]: INFO nova.compute.manager [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1873.312448] env[62525]: DEBUG oslo.service.loopingcall [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.312641] env[62525]: DEBUG nova.compute.manager [-] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1873.312758] env[62525]: DEBUG nova.network.neutron [-] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1873.315811] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.316295] env[62525]: DEBUG nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1873.318874] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1873.318874] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5295dd29-86ee-a0fc-e142-80fe7c2c29f2" [ 1873.318874] env[62525]: _type = "Task" [ 1873.318874] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.328082] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5295dd29-86ee-a0fc-e142-80fe7c2c29f2, 'name': SearchDatastore_Task, 'duration_secs': 0.007982} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.333587] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1873.333864] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71276bd9-e4f3-49f2-85b7-55abcc8d8012 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.353192] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1873.353192] env[62525]: value = "task-1782238" [ 1873.353192] env[62525]: _type = "Task" [ 1873.353192] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.361915] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782238, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.560764] env[62525]: DEBUG oslo_vmware.api [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782236, 'name': PowerOffVM_Task, 'duration_secs': 0.194201} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.562791] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1873.563065] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1873.563415] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23f77bfb-1665-4996-a9e1-014455af4a25 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.584340] env[62525]: DEBUG nova.compute.manager [req-08cf172c-88d1-42c3-a28b-5c9131199882 req-532f2dc8-f14f-4cdf-a659-855ce0c9ed81 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Received event network-vif-deleted-f44dec54-aad8-44f9-a6d0-8a6985797a65 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1873.584618] env[62525]: INFO nova.compute.manager [req-08cf172c-88d1-42c3-a28b-5c9131199882 req-532f2dc8-f14f-4cdf-a659-855ce0c9ed81 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Neutron deleted interface f44dec54-aad8-44f9-a6d0-8a6985797a65; detaching it from the instance and deleting it from the info cache [ 1873.584709] env[62525]: DEBUG nova.network.neutron [req-08cf172c-88d1-42c3-a28b-5c9131199882 req-532f2dc8-f14f-4cdf-a659-855ce0c9ed81 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.660746] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1873.660987] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Deleting contents of the VM from datastore datastore1 
{{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1873.661166] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Deleting the datastore file [datastore1] 42d14e44-44d6-46de-84e3-049a2d7e84f3 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1873.661421] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8be75d5-b86d-4e40-bc7b-0defd5282f60 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.667711] env[62525]: DEBUG oslo_vmware.api [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for the task: (returnval){ [ 1873.667711] env[62525]: value = "task-1782240" [ 1873.667711] env[62525]: _type = "Task" [ 1873.667711] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.675637] env[62525]: DEBUG oslo_vmware.api [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.732645] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782237, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.821358] env[62525]: DEBUG nova.compute.utils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1873.823059] env[62525]: DEBUG nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1873.823189] env[62525]: DEBUG nova.network.neutron [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1873.862180] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782238, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.863595] env[62525]: DEBUG nova.policy [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83bf79d024f345a9a8c02004f8cefbaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eab7fca262814290a975bf85badc9b71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1873.879138] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.879410] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.879624] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.879840] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.880018] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.882134] env[62525]: INFO nova.compute.manager [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Terminating instance [ 1873.883893] env[62525]: DEBUG nova.compute.manager [None 
req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1873.884164] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1873.884972] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c984a650-2088-42bf-842b-1db9d6722826 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.892646] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1873.893228] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1687153b-4cba-45b2-8bf3-0fe9b1286ac0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.898753] env[62525]: DEBUG oslo_vmware.api [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1873.898753] env[62525]: value = "task-1782241" [ 1873.898753] env[62525]: _type = "Task" [ 1873.898753] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.907408] env[62525]: DEBUG oslo_vmware.api [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782241, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.064697] env[62525]: DEBUG nova.network.neutron [-] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.088803] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d82a6c0c-0352-4347-871a-eed74608a8a2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.097604] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952330f3-5f6c-409c-9b15-b0fac256a1d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.135624] env[62525]: DEBUG nova.compute.manager [req-08cf172c-88d1-42c3-a28b-5c9131199882 req-532f2dc8-f14f-4cdf-a659-855ce0c9ed81 service nova] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Detach interface failed, port_id=f44dec54-aad8-44f9-a6d0-8a6985797a65, reason: Instance 52341e2f-b556-4f84-b60e-16a3e71df504 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1874.140573] env[62525]: DEBUG nova.network.neutron [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Successfully created port: 386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1874.178684] env[62525]: DEBUG oslo_vmware.api [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Task: {'id': task-1782240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133537} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.178991] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1874.179213] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1874.179416] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1874.179610] env[62525]: INFO nova.compute.manager [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1874.180079] env[62525]: DEBUG oslo.service.loopingcall [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.180332] env[62525]: DEBUG nova.compute.manager [-] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1874.180438] env[62525]: DEBUG nova.network.neutron [-] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1874.239375] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782237, 'name': ReconfigVM_Task, 'duration_secs': 0.757696} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.239706] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Reconfigured VM instance instance-00000073 to attach disk [datastore1] fd078815-58e6-4a3a-9da8-dd5324ea76b8/fd078815-58e6-4a3a-9da8-dd5324ea76b8.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1874.240881] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e317463b-cb5a-4a7d-a5fc-65feefad4bc2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.253223] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1874.253223] env[62525]: value = "task-1782242" [ 1874.253223] env[62525]: _type = "Task" [ 1874.253223] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.259506] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782242, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.326148] env[62525]: DEBUG nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1874.364447] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782238, 'name': ReconfigVM_Task, 'duration_secs': 0.51243} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.365716] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1874.366686] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bdcc60-0bf5-4021-92c5-47df54c5bc28 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.394090] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1874.394852] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-232179e5-bf04-4c41-9373-347b1fcdb2ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.418145] env[62525]: DEBUG oslo_vmware.api [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782241, 'name': PowerOffVM_Task, 'duration_secs': 0.318836} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.422020] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1874.422020] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1874.422020] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1874.422020] env[62525]: value = "task-1782243" [ 1874.422020] env[62525]: _type = "Task" [ 1874.422020] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.422020] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1162862-facc-4697-9f86-f4e5296c149b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.432043] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782243, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.566795] env[62525]: INFO nova.compute.manager [-] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Took 1.25 seconds to deallocate network for instance. [ 1874.616387] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.616387] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.616387] env[62525]: DEBUG nova.compute.manager [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Going to confirm migration 7 {{(pid=62525) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1874.635305] env[62525]: DEBUG nova.compute.manager [req-b51d0ac1-9493-4bde-a845-506edf0f2e48 req-960cf82b-edb2-43a1-b2f5-6701e525955d service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Received event network-vif-deleted-5e23aff4-06c8-4549-b425-5b83423352ce {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1874.635547] env[62525]: INFO nova.compute.manager [req-b51d0ac1-9493-4bde-a845-506edf0f2e48 req-960cf82b-edb2-43a1-b2f5-6701e525955d service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Neutron deleted interface 5e23aff4-06c8-4549-b425-5b83423352ce; detaching it from the instance and deleting it from the info cache [ 1874.635547] env[62525]: DEBUG nova.network.neutron [req-b51d0ac1-9493-4bde-a845-506edf0f2e48 req-960cf82b-edb2-43a1-b2f5-6701e525955d service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.640158] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Unregistered the VM {{(pid=62525) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.640366] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.640550] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleting the datastore file [datastore1] cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.641024] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4038e0d-0aa2-478f-a395-6b8ea0b61d0e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.648311] env[62525]: DEBUG oslo_vmware.api [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1874.648311] env[62525]: value = "task-1782245" [ 1874.648311] env[62525]: _type = "Task" [ 1874.648311] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.658304] env[62525]: DEBUG oslo_vmware.api [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782245, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.761190] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782242, 'name': Rename_Task, 'duration_secs': 0.138748} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.761467] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1874.761769] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3bef815-360d-40c0-821b-5806bd2abc44 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.768582] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1874.768582] env[62525]: value = "task-1782246" [ 1874.768582] env[62525]: _type = "Task" [ 1874.768582] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.776468] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782246, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.934305] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782243, 'name': ReconfigVM_Task, 'duration_secs': 0.27565} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.934618] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Reconfigured VM instance instance-00000066 to attach disk [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0/e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1874.935435] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785aa31d-bfc5-4cd3-9420-9430fa70980b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.954808] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4238ea-dc5e-4922-807f-3204f605eac7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.975214] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63ab0e5-f8a3-48ff-a899-79572ba5d6e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.994704] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f44733-3b6c-4a3d-823a-f7ade0a04341 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.002455] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1875.002743] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4e7c10a-fc4e-4e26-a733-b55cefde80f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.009708] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1875.009708] env[62525]: value = "task-1782247" [ 1875.009708] env[62525]: _type = "Task" [ 1875.009708] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.017201] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.073582] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.073854] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.074103] env[62525]: DEBUG nova.objects.instance [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'resources' on Instance uuid 52341e2f-b556-4f84-b60e-16a3e71df504 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.108629] env[62525]: DEBUG nova.network.neutron [-] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.139425] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e9ac90e-ebf9-4ab3-8f38-dd2c5d997ff8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.149597] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71d3029-7d36-4823-9000-33816763818c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.171064] env[62525]: DEBUG oslo_vmware.api [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147374} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.171064] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1875.171064] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1875.171438] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1875.171438] env[62525]: INFO nova.compute.manager [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1875.171538] env[62525]: DEBUG oslo.service.loopingcall [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1875.171762] env[62525]: DEBUG nova.compute.manager [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1875.172106] env[62525]: DEBUG nova.network.neutron [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1875.184092] env[62525]: DEBUG nova.compute.manager [req-b51d0ac1-9493-4bde-a845-506edf0f2e48 req-960cf82b-edb2-43a1-b2f5-6701e525955d service nova] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Detach interface failed, port_id=5e23aff4-06c8-4549-b425-5b83423352ce, reason: Instance 42d14e44-44d6-46de-84e3-049a2d7e84f3 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1875.185324] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.185493] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.185658] env[62525]: DEBUG nova.network.neutron [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1875.185843] env[62525]: DEBUG nova.objects.instance [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'info_cache' on Instance uuid c4e31de8-0b94-4fea-aa30-8af5608d257a {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.279705] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782246, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.337704] env[62525]: DEBUG nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1875.363589] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1875.363838] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1875.363995] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.364192] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1875.364341] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.364489] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1875.364695] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1875.364858] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1875.365033] env[62525]: DEBUG 
nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1875.365210] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1875.365379] env[62525]: DEBUG nova.virt.hardware [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1875.366352] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35e095c-f9fa-4f44-b2ac-aa63529fba55 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.376137] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cd12cf-876c-45c2-b24a-0fb01f3fed26 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.520609] env[62525]: DEBUG oslo_vmware.api [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782247, 'name': PowerOnVM_Task, 'duration_secs': 0.406331} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.520941] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1875.611470] env[62525]: INFO nova.compute.manager [-] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Took 1.43 seconds to deallocate network for instance. 
[ 1875.694395] env[62525]: DEBUG nova.compute.manager [req-feb7b4d0-5e48-4d6d-ad24-63e487036182 req-08acb2b5-259c-4def-98ad-28941996c272 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Received event network-vif-plugged-386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1875.694617] env[62525]: DEBUG oslo_concurrency.lockutils [req-feb7b4d0-5e48-4d6d-ad24-63e487036182 req-08acb2b5-259c-4def-98ad-28941996c272 service nova] Acquiring lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.694823] env[62525]: DEBUG oslo_concurrency.lockutils [req-feb7b4d0-5e48-4d6d-ad24-63e487036182 req-08acb2b5-259c-4def-98ad-28941996c272 service nova] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.694988] env[62525]: DEBUG oslo_concurrency.lockutils [req-feb7b4d0-5e48-4d6d-ad24-63e487036182 req-08acb2b5-259c-4def-98ad-28941996c272 service nova] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.695167] env[62525]: DEBUG nova.compute.manager [req-feb7b4d0-5e48-4d6d-ad24-63e487036182 req-08acb2b5-259c-4def-98ad-28941996c272 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] No waiting events found dispatching network-vif-plugged-386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1875.695330] env[62525]: WARNING nova.compute.manager [req-feb7b4d0-5e48-4d6d-ad24-63e487036182 req-08acb2b5-259c-4def-98ad-28941996c272 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Received unexpected event network-vif-plugged-386f6960-c9a5-4c48-9197-bf7df64deb96 for instance with vm_state building and task_state spawning. [ 1875.745275] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b0b747-ae73-4bc4-9dd9-c46d464dac5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.752830] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2860c54e-1f73-499a-8574-56c18460558c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.787399] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b39c7f-31b4-41a1-9ec1-42f0114a3017 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.794996] env[62525]: DEBUG oslo_vmware.api [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782246, 'name': PowerOnVM_Task, 'duration_secs': 0.533007} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.797058] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1875.797279] env[62525]: INFO nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Took 9.02 seconds to spawn the instance on the hypervisor. [ 1875.797458] env[62525]: DEBUG nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1875.798268] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad068114-3288-45e3-a97e-e4298c9870b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.801634] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f868ee-68e7-46fe-bf6b-7e03bc7d0d37 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.818925] env[62525]: DEBUG nova.compute.provider_tree [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.855375] env[62525]: DEBUG nova.network.neutron [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Successfully updated port: 386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1876.118070] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.329057] env[62525]: DEBUG nova.scheduler.client.report [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1876.332026] env[62525]: DEBUG nova.network.neutron [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.334812] env[62525]: INFO nova.compute.manager [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Took 16.92 seconds to build instance. [ 1876.358677] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.358796] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.358949] env[62525]: DEBUG nova.network.neutron [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1876.405125] env[62525]: DEBUG nova.network.neutron [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance_info_cache with network_info: [{"id": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "address": "fa:16:3e:cb:82:81", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd060e85e-b0", "ovs_interfaceid": "d060e85e-b0a9-45db-8fb4-2994f45e01f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.532901] env[62525]: INFO nova.compute.manager [None req-9095e483-80e0-43a5-a036-aee126bc68f2 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: 
e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance to original state: 'active' [ 1876.835540] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.836835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.718s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.836835] env[62525]: DEBUG nova.objects.instance [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lazy-loading 'resources' on Instance uuid 42d14e44-44d6-46de-84e3-049a2d7e84f3 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.837677] env[62525]: INFO nova.compute.manager [-] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Took 1.67 seconds to deallocate network for instance. [ 1876.838151] env[62525]: DEBUG oslo_concurrency.lockutils [None req-de5cf6ca-131b-45d8-91a5-60cacf38c446 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.429s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.871503] env[62525]: INFO nova.scheduler.client.report [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted allocations for instance 52341e2f-b556-4f84-b60e-16a3e71df504 [ 1876.896864] env[62525]: DEBUG nova.network.neutron [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1876.908776] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-c4e31de8-0b94-4fea-aa30-8af5608d257a" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.909077] env[62525]: DEBUG nova.objects.instance [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'migration_context' on Instance uuid c4e31de8-0b94-4fea-aa30-8af5608d257a {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1877.040824] env[62525]: DEBUG nova.network.neutron [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Updating instance_info_cache with network_info: [{"id": "386f6960-c9a5-4c48-9197-bf7df64deb96", "address": "fa:16:3e:52:ae:8c", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f6960-c9", "ovs_interfaceid": "386f6960-c9a5-4c48-9197-bf7df64deb96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.347051] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.387863] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d5def2d8-d987-4cfa-8c09-a717834bd9ef tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "52341e2f-b556-4f84-b60e-16a3e71df504" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.279s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.415221] env[62525]: DEBUG nova.objects.base [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Object Instance lazy-loaded attributes: 
info_cache,migration_context {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1877.416546] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecb1dc4-f097-4f90-8c7b-de7981beba7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.447421] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d073e331-75ec-4edd-b042-c8d73f6a9bd8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.454016] env[62525]: DEBUG oslo_vmware.api [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1877.454016] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52835bf1-9f25-985a-8ef3-fbf1022ee673" [ 1877.454016] env[62525]: _type = "Task" [ 1877.454016] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.463098] env[62525]: DEBUG oslo_vmware.api [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52835bf1-9f25-985a-8ef3-fbf1022ee673, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.496653] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9be5816-636f-4a42-96d2-411dca7b26c2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.504740] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6f3c69-8970-4b95-b663-ae000f7579b0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.536108] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aca2bfd-d797-41b6-b022-bc1bfa13381a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.544192] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29893036-3a18-4c49-b913-a31e96975abe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.550229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.550520] env[62525]: DEBUG nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Instance network_info: |[{"id": "386f6960-c9a5-4c48-9197-bf7df64deb96", "address": "fa:16:3e:52:ae:8c", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": 
"tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f6960-c9", "ovs_interfaceid": "386f6960-c9a5-4c48-9197-bf7df64deb96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1877.552556] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:ae:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '386f6960-c9a5-4c48-9197-bf7df64deb96', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1877.560495] env[62525]: DEBUG oslo.service.loopingcall [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.561797] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1877.574090] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aba971da-eec4-4291-bb6a-1da85bb8801f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.588533] env[62525]: DEBUG nova.compute.provider_tree [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.596333] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1877.596333] env[62525]: value = "task-1782248" [ 1877.596333] env[62525]: _type = "Task" [ 1877.596333] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.604622] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782248, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.605551] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.605789] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.605993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.606187] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.606351] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.608250] env[62525]: INFO nova.compute.manager [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Terminating instance [ 1877.609998] env[62525]: DEBUG nova.compute.manager [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1877.610216] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1877.611247] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62deb5cb-8245-49d1-afb4-ad176795bb8f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.618585] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1877.618931] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6c26560-a721-48fb-9fd1-0bf2b3ed5cc1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.625441] env[62525]: DEBUG oslo_vmware.api [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1877.625441] env[62525]: value = "task-1782249" [ 1877.625441] env[62525]: _type = "Task" [ 1877.625441] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.635851] env[62525]: DEBUG oslo_vmware.api [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.638829] env[62525]: DEBUG nova.compute.manager [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Received event network-changed-4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.639133] env[62525]: DEBUG nova.compute.manager [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Refreshing instance network info cache due to event network-changed-4f9698c8-b319-4565-b308-ac2450ee865f. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1877.639256] env[62525]: DEBUG oslo_concurrency.lockutils [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] Acquiring lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.639394] env[62525]: DEBUG oslo_concurrency.lockutils [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] Acquired lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.639552] env[62525]: DEBUG nova.network.neutron [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Refreshing network info cache for port 4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1877.725950] env[62525]: DEBUG nova.compute.manager [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Received event network-vif-deleted-b40cb3df-4673-45d7-8b69-c642a8939d96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.725950] env[62525]: DEBUG nova.compute.manager [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Received event network-changed-386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.726185] env[62525]: DEBUG nova.compute.manager [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Refreshing instance network info cache due to event network-changed-386f6960-c9a5-4c48-9197-bf7df64deb96. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1877.726782] env[62525]: DEBUG oslo_concurrency.lockutils [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] Acquiring lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.726985] env[62525]: DEBUG oslo_concurrency.lockutils [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] Acquired lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.727236] env[62525]: DEBUG nova.network.neutron [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Refreshing network info cache for port 386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1877.964589] env[62525]: DEBUG oslo_vmware.api [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52835bf1-9f25-985a-8ef3-fbf1022ee673, 'name': SearchDatastore_Task, 'duration_secs': 0.008413} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.964910] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.094570] env[62525]: DEBUG nova.scheduler.client.report [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1878.106840] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782248, 'name': CreateVM_Task, 'duration_secs': 0.357905} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.107022] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1878.107691] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.107859] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.108221] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1878.108473] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-210e967b-041d-468a-948a-655af186a64f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.113304] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1878.113304] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5250250b-80e4-03bb-a623-35eb0da5c220" [ 1878.113304] env[62525]: _type = "Task" [ 1878.113304] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.122357] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5250250b-80e4-03bb-a623-35eb0da5c220, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.136420] env[62525]: DEBUG oslo_vmware.api [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782249, 'name': PowerOffVM_Task, 'duration_secs': 0.259214} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.136734] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1878.136853] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1878.137158] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e14a3e2-ad2b-471a-8126-38da84a7931f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.203399] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.203742] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.234191] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1878.234461] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1878.234642] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleting the datastore file [datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1878.234910] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79b7f0c8-a779-464f-9370-47971f8eb713 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.242933] env[62525]: DEBUG oslo_vmware.api [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 
tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1878.242933] env[62525]: value = "task-1782251" [ 1878.242933] env[62525]: _type = "Task" [ 1878.242933] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.251330] env[62525]: DEBUG oslo_vmware.api [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782251, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.603035] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.604974] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.259s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.605229] env[62525]: DEBUG nova.objects.instance [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'resources' on Instance uuid cb043ab8-dff7-48c6-b50b-a4d77a01eb41 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1878.620609] env[62525]: INFO nova.scheduler.client.report [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Deleted allocations for instance 42d14e44-44d6-46de-84e3-049a2d7e84f3 [ 1878.629224] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5250250b-80e4-03bb-a623-35eb0da5c220, 'name': SearchDatastore_Task, 'duration_secs': 0.009108} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.630030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.630227] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1878.630457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.630604] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.630771] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1878.632202] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f49a45d4-416b-46ea-87d6-ea15200787c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.641060] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1878.641353] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1878.642093] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e558ee2c-11b2-4f16-a10e-c9791cd30928 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.646950] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1878.646950] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5256bcec-fe33-07e2-5977-a0096e01db50" [ 1878.646950] env[62525]: _type = "Task" [ 1878.646950] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.655670] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5256bcec-fe33-07e2-5977-a0096e01db50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.706452] env[62525]: DEBUG nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1878.737927] env[62525]: DEBUG nova.network.neutron [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updated VIF entry in instance network info cache for port 4f9698c8-b319-4565-b308-ac2450ee865f. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1878.737927] env[62525]: DEBUG nova.network.neutron [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updating instance_info_cache with network_info: [{"id": "4f9698c8-b319-4565-b308-ac2450ee865f", "address": "fa:16:3e:4d:c3:e8", "network": {"id": "cffaf623-4a42-4b01-9378-7756a30b0311", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1846231375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a46df22dac6f473b8395f9302c3a4a75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afd3feb3-ffcc-4499-a2c2-eb6a48aefde9", "external-id": "nsx-vlan-transportzone-22", "segmentation_id": 22, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f9698c8-b3", "ovs_interfaceid": "4f9698c8-b319-4565-b308-ac2450ee865f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.753494] env[62525]: DEBUG oslo_vmware.api [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145611} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.753651] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1878.753784] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1878.753965] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1878.754153] env[62525]: INFO nova.compute.manager [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Took 1.14 seconds to destroy the instance on the hypervisor. 
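The entries above trace the driver's destroy path for instance e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0: the VM is powered off (PowerOffVM_Task), unregistered from the vCenter inventory (UnregisterVM), and its directory is removed from datastore1 (DeleteDatastoreFile_Task), with each step driven by a vCenter task that is polled to completion. Below is a minimal, self-contained sketch of that shape; the VSphereSession class and its methods are illustrative stand-ins, not the real nova.virt.vmwareapi or oslo.vmware API.

# Hypothetical sketch of the destroy flow traced in the log above:
# power off -> unregister -> delete datastore files. All names are stand-ins.
class VSphereSession:
    def start_task(self, operation: str, target: str) -> str:
        """Kick off a vCenter operation (e.g. 'PowerOffVM_Task') and return a task id."""
        print(f"Invoking {operation} on {target}")
        return f"task-{operation}"

    def wait_for_task(self, task_id: str) -> None:
        """Poll the task until it reports success, like the wait_for_task entries above."""
        print(f"Task {task_id} completed successfully")


def destroy_instance(session: VSphereSession, vm_ref: str, ds_path: str) -> None:
    # 1. Power off the VM so it can be unregistered safely.
    session.wait_for_task(session.start_task("PowerOffVM_Task", vm_ref))
    # 2. Unregister the VM from the vCenter inventory.
    session.start_task("UnregisterVM", vm_ref)
    # 3. Delete the instance directory from the datastore.
    session.wait_for_task(session.start_task("DeleteDatastoreFile_Task", ds_path))


if __name__ == "__main__":
    destroy_instance(VSphereSession(), "vm-e1a2983e",
                     "[datastore1] e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0")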
[ 1878.754388] env[62525]: DEBUG oslo.service.loopingcall [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.754572] env[62525]: DEBUG nova.compute.manager [-] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1878.754678] env[62525]: DEBUG nova.network.neutron [-] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1878.758871] env[62525]: DEBUG nova.network.neutron [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Updated VIF entry in instance network info cache for port 386f6960-c9a5-4c48-9197-bf7df64deb96. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1878.759203] env[62525]: DEBUG nova.network.neutron [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Updating instance_info_cache with network_info: [{"id": "386f6960-c9a5-4c48-9197-bf7df64deb96", "address": "fa:16:3e:52:ae:8c", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f6960-c9", "ovs_interfaceid": "386f6960-c9a5-4c48-9197-bf7df64deb96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.131523] env[62525]: DEBUG oslo_concurrency.lockutils [None req-948c0556-49d3-4e8e-9617-ddb2dbc27d79 tempest-SecurityGroupsTestJSON-1269799265 tempest-SecurityGroupsTestJSON-1269799265-project-member] Lock "42d14e44-44d6-46de-84e3-049a2d7e84f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.108s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.160417] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5256bcec-fe33-07e2-5977-a0096e01db50, 'name': 
SearchDatastore_Task, 'duration_secs': 0.008757} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.164173] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c589093e-534f-4d07-a41e-e8fe54630d7b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.168739] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1879.168739] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5274c6ed-fcf5-1d2a-6599-1b8eb49b3751" [ 1879.168739] env[62525]: _type = "Task" [ 1879.168739] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.177028] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5274c6ed-fcf5-1d2a-6599-1b8eb49b3751, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.228445] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.240767] env[62525]: DEBUG oslo_concurrency.lockutils [req-a67e2460-c790-4722-88e2-b151b13a5225 req-5dcf1864-a05f-46a8-94c2-75cba150c78d service nova] Releasing lock "refresh_cache-fd078815-58e6-4a3a-9da8-dd5324ea76b8" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.261610] env[62525]: DEBUG oslo_concurrency.lockutils [req-41c0ba6f-bc73-41e9-8bfc-e8cc1fd4195d req-0f29b86e-b009-4984-8fb4-450d1fe6497a service nova] Releasing lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.267294] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af92f3c-a60e-4e05-ad2e-8fd9285378eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.275330] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb76243-4b82-4020-a5f5-85fd4c054839 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.307695] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaa8ce9-fb65-42f2-97ed-f2695ea08dec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.315647] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849c1873-2db0-4bb6-9583-7e7812785600 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.330827] env[62525]: DEBUG nova.compute.provider_tree [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.682689] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5274c6ed-fcf5-1d2a-6599-1b8eb49b3751, 'name': SearchDatastore_Task, 'duration_secs': 0.009676} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.683063] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.683401] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86/5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1879.683724] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12b5f0a9-1792-44ba-a1f9-b08fbecbc764 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.693128] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1879.693128] env[62525]: value = "task-1782252" [ 1879.693128] env[62525]: _type = "Task" [ 1879.693128] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.697862] env[62525]: DEBUG nova.network.neutron [-] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.705845] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782252, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.754727] env[62525]: DEBUG nova.compute.manager [req-2b6f5629-0896-4593-9338-bf8cf524c989 req-765fa192-6384-41ce-b574-c738c2473c22 service nova] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Received event network-vif-deleted-931ab189-c48d-469b-8776-5e4d3c8cf77a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1879.834383] env[62525]: DEBUG nova.scheduler.client.report [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1880.203026] env[62525]: INFO nova.compute.manager [-] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Took 1.45 seconds to deallocate network for instance. [ 1880.203425] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782252, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459881} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.205259] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86/5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1880.205259] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1880.208458] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eededd18-64f5-4cba-887b-8df87735581c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.215711] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1880.215711] env[62525]: value = "task-1782253" [ 1880.215711] env[62525]: _type = "Task" [ 1880.215711] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.226487] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782253, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.339358] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.734s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.342066] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.377s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.378839] env[62525]: INFO nova.scheduler.client.report [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted allocations for instance cb043ab8-dff7-48c6-b50b-a4d77a01eb41 [ 1880.712329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.727411] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.178176} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.729277] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1880.730142] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961785bd-44b7-4087-84fe-ba1a41ebde28 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.758282] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86/5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1880.758282] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e78a957e-9815-4aeb-bf81-68afaaec17d6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.779735] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1880.779735] env[62525]: value = "task-1782254" [ 1880.779735] env[62525]: _type = "Task" [ 1880.779735] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.788752] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782254, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.889068] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c17fa01-1f0b-4524-80a7-79ac4de69a8a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "cb043ab8-dff7-48c6-b50b-a4d77a01eb41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.009s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.979402] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddd61a0-e8a7-4234-8a1f-f460ab3b0883 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.987489] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4093f23-b524-4669-a420-82a74f5a5511 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.019376] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2addb8-9552-4522-814b-471ab466f2a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.027502] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1889718e-3e92-4914-b33b-6736fa9b95fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.042930] env[62525]: DEBUG nova.compute.provider_tree [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1881.290638] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782254, 'name': ReconfigVM_Task, 'duration_secs': 0.509992} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.291027] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86/5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1881.291781] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0e3d5f4-5bf6-4d9b-a703-2e700098ce51 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.298608] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1881.298608] env[62525]: value = "task-1782255" [ 1881.298608] env[62525]: _type = "Task" [ 1881.298608] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.307499] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782255, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.547904] env[62525]: DEBUG nova.scheduler.client.report [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1881.808325] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782255, 'name': Rename_Task, 'duration_secs': 0.24247} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.808704] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1881.808957] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b877e814-3367-4244-b20c-403706ee9cfc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.816034] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1881.816034] env[62525]: value = "task-1782256" [ 1881.816034] env[62525]: _type = "Task" [ 1881.816034] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.825609] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.332123] env[62525]: DEBUG oslo_vmware.api [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782256, 'name': PowerOnVM_Task, 'duration_secs': 0.486738} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.332123] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1882.332123] env[62525]: INFO nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Took 6.99 seconds to spawn the instance on the hypervisor. 
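The recurring "Waiting for the task ... progress is 0% ... completed successfully" pairs that carry the spawn of 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86 (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all come from the same polling loop in oslo_vmware.api. A simplified stand-alone sketch of that pattern follows; the poll callable and the fake task states are invented for illustration and do not talk to a real vCenter.

import time
from typing import Callable, Tuple

def wait_for_task(poll: Callable[[], Tuple[str, int]], interval: float = 0.5) -> None:
    # Poll until the task reports success, mirroring the
    # "progress is N%" / "completed successfully" lines in the log.
    while True:
        state, progress = poll()
        print(f"progress is {progress}%")
        if state == "success":
            print("completed successfully")
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)

if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    states = iter([("running", 0), ("running", 50), ("success", 100)])
    wait_for_task(lambda: next(states), interval=0.0)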
[ 1882.332123] env[62525]: DEBUG nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1882.336032] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65d84bc-2765-4895-98bf-2edf98f709ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.558247] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.216s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.564018] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.333s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.564018] env[62525]: INFO nova.compute.claims [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1882.865018] env[62525]: INFO nova.compute.manager [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Took 13.50 seconds to build instance. 
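The "Inventory has not changed for provider ..." records nearby carry the full inventory that the scheduler report client syncs to placement, and the "Claim successful on node ..." records consume from that inventory. Under the usual placement capacity rule, the schedulable amount per resource class is (total - reserved) * allocation_ratio. Below is a small sketch, in plain Python and using the figures from the logged inventory, that computes those capacities; it is an illustration of the formula, not Nova's resource-tracker code.

    # Sketch: usable capacity implied by the provider inventory logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }


    def usable_capacity(inv):
        """Return the schedulable amount per resource class:
        (total - reserved) * allocation_ratio."""
        return {
            rc: (data['total'] - data['reserved']) * data['allocation_ratio']
            for rc, data in inv.items()
        }


    if __name__ == "__main__":
        for rc, cap in usable_capacity(inventory).items():
            print(f"{rc}: {cap}")
        # Expected output: VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0
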
[ 1883.119340] env[62525]: INFO nova.scheduler.client.report [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted allocation for migration 437fd501-04d0-4c8e-a038-403e281e8b8e [ 1883.367695] env[62525]: DEBUG oslo_concurrency.lockutils [None req-0c791082-cf71-406e-9d44-d690bcb6828e tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.010s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.504051] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.505725] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.626634] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f4f6f8e0-59db-4ce2-ad25-8628c0a6e460 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.011s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.686204] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e5f5ad-5602-4e5a-bd0d-dff205b87abe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.693267] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ee78ea-53b4-4b61-b333-cf2adb1b9c19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.727541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7761285-92d5-4d95-b287-36a6ef0ce4c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.736018] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a90e11-83f9-459b-9d0a-7933fd74466c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.749667] env[62525]: DEBUG nova.compute.provider_tree [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: 
bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.008259] env[62525]: DEBUG nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Starting instance... {{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1884.255342] env[62525]: DEBUG nova.scheduler.client.report [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1884.300140] env[62525]: DEBUG nova.compute.manager [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Received event network-changed-386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1884.300382] env[62525]: DEBUG nova.compute.manager [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Refreshing instance network info cache due to event network-changed-386f6960-c9a5-4c48-9197-bf7df64deb96. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1884.300603] env[62525]: DEBUG oslo_concurrency.lockutils [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] Acquiring lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.300744] env[62525]: DEBUG oslo_concurrency.lockutils [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] Acquired lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.300902] env[62525]: DEBUG nova.network.neutron [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Refreshing network info cache for port 386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1884.532384] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.761721] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.762421] env[62525]: DEBUG nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1884.765574] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.053s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.765574] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.767714] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.235s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.768745] env[62525]: INFO nova.compute.claims [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1884.794360] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.797115] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.797115] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.797115] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.797115] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.798263] env[62525]: INFO nova.compute.manager [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Terminating instance [ 1884.800945] env[62525]: INFO nova.scheduler.client.report [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted allocations for instance e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0 [ 1884.802410] env[62525]: DEBUG nova.compute.manager [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1884.802739] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1884.806751] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf5e92e-ebe8-49b3-9438-d8cc8a5fc2fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.817987] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1884.818474] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d15b085-9011-458d-af06-79b41f03927d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.828141] env[62525]: DEBUG oslo_vmware.api [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1884.828141] env[62525]: value = "task-1782257" [ 1884.828141] env[62525]: _type = "Task" [ 1884.828141] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.840056] env[62525]: DEBUG oslo_vmware.api [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782257, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.066554] env[62525]: DEBUG nova.network.neutron [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Updated VIF entry in instance network info cache for port 386f6960-c9a5-4c48-9197-bf7df64deb96. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1885.067764] env[62525]: DEBUG nova.network.neutron [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Updating instance_info_cache with network_info: [{"id": "386f6960-c9a5-4c48-9197-bf7df64deb96", "address": "fa:16:3e:52:ae:8c", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f6960-c9", "ovs_interfaceid": "386f6960-c9a5-4c48-9197-bf7df64deb96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.278691] env[62525]: DEBUG nova.compute.utils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1885.280112] env[62525]: DEBUG nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1885.280288] env[62525]: DEBUG nova.network.neutron [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1885.317500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-758ace9a-53b0-4c39-8ae7-fc8eda42a06a tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.711s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.337645] env[62525]: DEBUG oslo_vmware.api [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782257, 'name': PowerOffVM_Task, 'duration_secs': 0.188301} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.338279] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1885.338460] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1885.338808] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-435af78d-5c27-4279-888a-ad121d75dfc8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.343567] env[62525]: DEBUG nova.policy [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59a07073c7534d17bfb2013552bbe0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c56f465d1a641a99458904c04137621', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1885.429759] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.430017] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.465023] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1885.465023] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1885.465023] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleting the datastore file [datastore1] c4e31de8-0b94-4fea-aa30-8af5608d257a {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1885.465023] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-751b1b89-b813-4f87-bbec-fa6a35ed6248 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.470046] env[62525]: DEBUG oslo_vmware.api [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1885.470046] env[62525]: value = "task-1782259" [ 1885.470046] env[62525]: _type = "Task" [ 1885.470046] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.481440] env[62525]: DEBUG oslo_vmware.api [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782259, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.569351] env[62525]: DEBUG oslo_concurrency.lockutils [req-91cf83ab-d67c-41f5-8578-ed3be3cb7d60 req-9353e8d5-6f74-4ce2-89e9-e6a02cd355b9 service nova] Releasing lock "refresh_cache-5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.673419] env[62525]: DEBUG nova.network.neutron [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Successfully created port: 4b806ffd-ef3b-46ce-b9af-cd4758d29d4c {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1885.735030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "870d7795-49ca-4201-983a-a85b590e805e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.735030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.786076] env[62525]: DEBUG nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1885.918746] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3f0b74-1303-4d05-907f-6eacb24c3ba1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.926897] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17c2b4b-700d-4802-9d9c-4645fe5ec94a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.932780] env[62525]: DEBUG nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1885.963950] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f477f4-fb61-4e53-9f14-1cb47d9c3657 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.976481] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7b6961-8a83-4509-bf0b-980c4e8c9299 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.986691] env[62525]: DEBUG oslo_vmware.api [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186187} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.996744] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1885.997123] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1885.997435] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1885.997730] env[62525]: INFO nova.compute.manager [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1885.998172] env[62525]: DEBUG oslo.service.loopingcall [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.999765] env[62525]: DEBUG nova.compute.provider_tree [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1886.001625] env[62525]: DEBUG nova.compute.manager [-] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1886.001783] env[62525]: DEBUG nova.network.neutron [-] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1886.170738] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "abd538d5-f433-4896-9871-5cdef303cda0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.171025] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "abd538d5-f433-4896-9871-5cdef303cda0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.236257] env[62525]: DEBUG nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1886.302556] env[62525]: DEBUG nova.compute.manager [req-8b18cc78-b1df-4c76-9c20-e71546e9b494 req-696c7b90-cc06-4684-91bb-7d02b21bdcca service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Received event network-vif-deleted-d060e85e-b0a9-45db-8fb4-2994f45e01f5 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1886.302875] env[62525]: INFO nova.compute.manager [req-8b18cc78-b1df-4c76-9c20-e71546e9b494 req-696c7b90-cc06-4684-91bb-7d02b21bdcca service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Neutron deleted interface d060e85e-b0a9-45db-8fb4-2994f45e01f5; detaching it from the instance and deleting it from the info cache [ 1886.302922] env[62525]: DEBUG nova.network.neutron [req-8b18cc78-b1df-4c76-9c20-e71546e9b494 req-696c7b90-cc06-4684-91bb-7d02b21bdcca service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.485610] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.505978] env[62525]: DEBUG nova.scheduler.client.report [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1886.673891] env[62525]: DEBUG nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1886.756718] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.779787] env[62525]: DEBUG nova.network.neutron [-] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.795211] env[62525]: DEBUG nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1886.805608] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-987f2813-45c0-435e-9cac-0efbdd6f68cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.816983] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b70556-52e5-40b0-883d-cfed1467fa19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.837075] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1886.837315] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1886.837499] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1886.837750] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Flavor pref 0:0:0 {{(pid=62525) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1886.837908] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1886.838077] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1886.838294] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1886.838457] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1886.838623] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1886.838787] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1886.840054] env[62525]: DEBUG nova.virt.hardware [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1886.840054] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f75978-ce25-4a94-9139-25e9bd8ebc96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.850179] env[62525]: DEBUG nova.compute.manager [req-8b18cc78-b1df-4c76-9c20-e71546e9b494 req-696c7b90-cc06-4684-91bb-7d02b21bdcca service nova] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Detach interface failed, port_id=d060e85e-b0a9-45db-8fb4-2994f45e01f5, reason: Instance c4e31de8-0b94-4fea-aa30-8af5608d257a could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1886.858223] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fee7e70-a6e6-424d-9029-3e2bba2e2eee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.011708] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.012211] env[62525]: DEBUG nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1887.015276] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.530s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.016723] env[62525]: INFO nova.compute.claims [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1887.190936] env[62525]: DEBUG nova.network.neutron [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Successfully updated port: 4b806ffd-ef3b-46ce-b9af-cd4758d29d4c {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1887.205883] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.283063] env[62525]: INFO nova.compute.manager [-] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Took 1.28 seconds to deallocate network for instance. [ 1887.521592] env[62525]: DEBUG nova.compute.utils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1887.525025] env[62525]: DEBUG nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1887.525209] env[62525]: DEBUG nova.network.neutron [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1887.567182] env[62525]: DEBUG nova.policy [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9848bb0f47541f48af2c808646a09b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '209b99adb38b4c8b9e5a277019dbe292', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1887.694757] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "refresh_cache-c802b4f6-f34b-4d40-9bba-1b6d56643b8c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.694917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "refresh_cache-c802b4f6-f34b-4d40-9bba-1b6d56643b8c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.695087] env[62525]: DEBUG nova.network.neutron [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1887.792059] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.824289] env[62525]: DEBUG nova.network.neutron [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Successfully created port: bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1888.026824] env[62525]: DEBUG nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1888.166349] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc0632f-958e-45b8-8eeb-ef377efeca76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.174578] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dea9f8-9e41-440e-9116-5ab4549b8558 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.206837] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150c9d2b-26aa-442b-890b-f21f7ae64cb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.214482] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af913f3-943f-4220-b554-1504b7c5f3e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.228224] env[62525]: DEBUG nova.compute.provider_tree [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1888.236199] env[62525]: DEBUG nova.network.neutron [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1888.329746] env[62525]: DEBUG nova.compute.manager [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Received event network-vif-plugged-4b806ffd-ef3b-46ce-b9af-cd4758d29d4c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1888.329746] env[62525]: DEBUG oslo_concurrency.lockutils [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] Acquiring lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.330048] env[62525]: DEBUG oslo_concurrency.lockutils [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.330048] env[62525]: DEBUG oslo_concurrency.lockutils [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.330191] env[62525]: DEBUG nova.compute.manager [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] No waiting events found dispatching network-vif-plugged-4b806ffd-ef3b-46ce-b9af-cd4758d29d4c {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1888.330337] env[62525]: WARNING nova.compute.manager [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Received unexpected event network-vif-plugged-4b806ffd-ef3b-46ce-b9af-cd4758d29d4c for instance with vm_state building and task_state spawning. [ 1888.330494] env[62525]: DEBUG nova.compute.manager [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Received event network-changed-4b806ffd-ef3b-46ce-b9af-cd4758d29d4c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1888.330646] env[62525]: DEBUG nova.compute.manager [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Refreshing instance network info cache due to event network-changed-4b806ffd-ef3b-46ce-b9af-cd4758d29d4c. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1888.330810] env[62525]: DEBUG oslo_concurrency.lockutils [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] Acquiring lock "refresh_cache-c802b4f6-f34b-4d40-9bba-1b6d56643b8c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.368272] env[62525]: DEBUG nova.network.neutron [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Updating instance_info_cache with network_info: [{"id": "4b806ffd-ef3b-46ce-b9af-cd4758d29d4c", "address": "fa:16:3e:42:ec:3c", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b806ffd-ef", "ovs_interfaceid": "4b806ffd-ef3b-46ce-b9af-cd4758d29d4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.730866] env[62525]: DEBUG nova.scheduler.client.report [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1888.870894] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "refresh_cache-c802b4f6-f34b-4d40-9bba-1b6d56643b8c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.871430] env[62525]: DEBUG nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Instance network_info: |[{"id": "4b806ffd-ef3b-46ce-b9af-cd4758d29d4c", "address": "fa:16:3e:42:ec:3c", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b806ffd-ef", "ovs_interfaceid": "4b806ffd-ef3b-46ce-b9af-cd4758d29d4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1888.871862] env[62525]: DEBUG oslo_concurrency.lockutils [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] Acquired lock "refresh_cache-c802b4f6-f34b-4d40-9bba-1b6d56643b8c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.872226] env[62525]: DEBUG nova.network.neutron [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Refreshing network info cache for port 4b806ffd-ef3b-46ce-b9af-cd4758d29d4c {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1888.874107] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:ec:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b806ffd-ef3b-46ce-b9af-cd4758d29d4c', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1888.886428] env[62525]: DEBUG oslo.service.loopingcall [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1888.890280] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1888.890874] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03ccd5ed-44ec-4996-a9f9-0c5af1f79dc4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.919150] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1888.919150] env[62525]: value = "task-1782261" [ 1888.919150] env[62525]: _type = "Task" [ 1888.919150] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.929475] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782261, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.039940] env[62525]: DEBUG nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1889.065593] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1889.065851] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1889.066015] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1889.066224] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1889.066354] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1889.066518] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1889.066755] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a 
tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1889.066907] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1889.067098] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1889.067430] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1889.067430] env[62525]: DEBUG nova.virt.hardware [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1889.068291] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35190281-43d3-42b0-9bd4-778c896374c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.076500] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd47b93-9bc7-44e9-9921-356d71b046a8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.150827] env[62525]: DEBUG nova.network.neutron [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Updated VIF entry in instance network info cache for port 4b806ffd-ef3b-46ce-b9af-cd4758d29d4c. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1889.151228] env[62525]: DEBUG nova.network.neutron [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Updating instance_info_cache with network_info: [{"id": "4b806ffd-ef3b-46ce-b9af-cd4758d29d4c", "address": "fa:16:3e:42:ec:3c", "network": {"id": "b760cd9f-c08c-4c53-a2f6-eb772350b5ab", "bridge": "br-int", "label": "tempest-ServersTestJSON-1387528456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c56f465d1a641a99458904c04137621", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b806ffd-ef", "ovs_interfaceid": "4b806ffd-ef3b-46ce-b9af-cd4758d29d4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.239301] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.239301] env[62525]: DEBUG nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1889.240301] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.484s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.243970] env[62525]: INFO nova.compute.claims [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1889.346746] env[62525]: DEBUG nova.network.neutron [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Successfully updated port: bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1889.429016] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782261, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.654461] env[62525]: DEBUG oslo_concurrency.lockutils [req-a85293d6-9423-493e-9b91-131d60cd00ee req-94a19b7b-d495-4148-93b6-23715f43a03d service nova] Releasing lock "refresh_cache-c802b4f6-f34b-4d40-9bba-1b6d56643b8c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.748280] env[62525]: DEBUG nova.compute.utils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1889.749828] env[62525]: DEBUG nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1889.749976] env[62525]: DEBUG nova.network.neutron [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1889.797576] env[62525]: DEBUG nova.policy [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e50433248fb4eb088e90d25fcb67c7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f3d5c15d37145aa84818a2ad88f307f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1889.848835] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.848989] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.849167] env[62525]: DEBUG nova.network.neutron [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1889.929981] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782261, 'name': CreateVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.055555] env[62525]: DEBUG nova.network.neutron [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Successfully created port: 0972a59e-14da-461d-a9da-3400dfe9329e {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1890.255401] env[62525]: DEBUG nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1890.360681] env[62525]: DEBUG nova.compute.manager [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-vif-plugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1890.361961] env[62525]: DEBUG oslo_concurrency.lockutils [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.362860] env[62525]: DEBUG oslo_concurrency.lockutils [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.363120] env[62525]: DEBUG oslo_concurrency.lockutils [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.363403] env[62525]: DEBUG nova.compute.manager [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] No waiting events found dispatching network-vif-plugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1890.363648] env[62525]: WARNING nova.compute.manager [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received unexpected event network-vif-plugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 for instance with vm_state building and task_state spawning. [ 1890.363868] env[62525]: DEBUG nova.compute.manager [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1890.364099] env[62525]: DEBUG nova.compute.manager [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing instance network info cache due to event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1890.364334] env[62525]: DEBUG oslo_concurrency.lockutils [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.390040] env[62525]: DEBUG nova.network.neutron [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1890.429697] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782261, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.433211] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a841b85f-7669-4fb0-874c-e7a5b4ab89fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.441983] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523f3bee-568b-4388-82a6-7f2136eeb300 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.477751] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0b441c-7815-45f2-bde5-ea61b61c8f22 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.485748] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e96d19-f3f2-4097-b749-7c8c513eaddd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.499929] env[62525]: DEBUG nova.compute.provider_tree [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1890.553679] env[62525]: DEBUG nova.network.neutron [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1890.930170] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782261, 'name': CreateVM_Task, 'duration_secs': 1.577326} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.930351] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1890.931047] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.931223] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.931537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1890.931784] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51ab3ec4-4287-4def-a732-a61af96ad944 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.936008] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1890.936008] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5267198c-9310-9be9-7545-cbfd1dc92f61" [ 1890.936008] env[62525]: _type = "Task" [ 1890.936008] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.943261] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5267198c-9310-9be9-7545-cbfd1dc92f61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.003520] env[62525]: DEBUG nova.scheduler.client.report [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1891.057066] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.057463] env[62525]: DEBUG nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Instance network_info: |[{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1891.057958] env[62525]: DEBUG oslo_concurrency.lockutils [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.058167] env[62525]: DEBUG nova.network.neutron [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1891.059330] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None 
req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:96:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bceaa7b6-06fc-45f3-be4d-d376a854cc39', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1891.068813] env[62525]: DEBUG oslo.service.loopingcall [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.069894] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1891.070146] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7709687-2cf1-45d5-a3f2-cab6820af25f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.091200] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1891.091200] env[62525]: value = "task-1782263" [ 1891.091200] env[62525]: _type = "Task" [ 1891.091200] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.098724] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.269532] env[62525]: DEBUG nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1891.298068] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1891.298456] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1891.298717] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1891.299054] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1891.299317] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1891.299575] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1891.299910] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1891.300195] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1891.300520] 
env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1891.300808] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1891.301112] env[62525]: DEBUG nova.virt.hardware [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1891.302381] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264d1977-61a1-4c57-9ec4-cc70a3f62e53 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.312799] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a447b2bf-84ed-4072-a187-6c0096224c95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.451438] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5267198c-9310-9be9-7545-cbfd1dc92f61, 'name': SearchDatastore_Task, 'duration_secs': 0.008918} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.451438] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.451438] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1891.451438] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.451626] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.451787] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.452095] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9651369-0121-4579-90cf-8de686066b61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.462072] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1891.462335] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1891.463153] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a8d452a-2ca2-408e-a3af-c5362ba99bd8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.469253] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1891.469253] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522b23f9-6227-06e7-6bb5-cbd8e2f54766" [ 1891.469253] env[62525]: _type = "Task" [ 1891.469253] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.479988] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522b23f9-6227-06e7-6bb5-cbd8e2f54766, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.509117] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.509727] env[62525]: DEBUG nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1891.512733] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.307s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.514341] env[62525]: INFO nova.compute.claims [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1891.560784] env[62525]: DEBUG nova.network.neutron [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Successfully updated port: 0972a59e-14da-461d-a9da-3400dfe9329e {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1891.601875] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.787451] env[62525]: DEBUG nova.network.neutron [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updated VIF entry in instance network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1891.788064] env[62525]: DEBUG nova.network.neutron [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.979898] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522b23f9-6227-06e7-6bb5-cbd8e2f54766, 'name': SearchDatastore_Task, 'duration_secs': 0.024128} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.980679] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07e1f632-fa9e-49aa-b960-9365463b7b56 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.986154] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1891.986154] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5261be59-5ebd-ab1d-ac33-c765a5b2004f" [ 1891.986154] env[62525]: _type = "Task" [ 1891.986154] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.993795] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5261be59-5ebd-ab1d-ac33-c765a5b2004f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.019385] env[62525]: DEBUG nova.compute.utils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1892.022760] env[62525]: DEBUG nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1892.022917] env[62525]: DEBUG nova.network.neutron [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1892.061257] env[62525]: DEBUG nova.policy [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e20c4d99e0b4e08a3b92f274ca94354', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6dbd20742b0f42d5ac04268223bfe911', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1892.064369] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "refresh_cache-c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.064502] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "refresh_cache-c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.064643] env[62525]: DEBUG nova.network.neutron [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1892.100996] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 
'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.291540] env[62525]: DEBUG oslo_concurrency.lockutils [req-f3e79d70-e47f-4b98-a5e7-aec63c776eba req-0631f141-5ead-428c-b2a8-bddd31575fed service nova] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.320451] env[62525]: DEBUG nova.network.neutron [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Successfully created port: 10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1892.498016] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5261be59-5ebd-ab1d-ac33-c765a5b2004f, 'name': SearchDatastore_Task, 'duration_secs': 0.010631} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.501669] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.501669] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c802b4f6-f34b-4d40-9bba-1b6d56643b8c/c802b4f6-f34b-4d40-9bba-1b6d56643b8c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1892.501669] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3b3124f-851b-4330-b932-6eeeac07ffcc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.508720] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1892.508720] env[62525]: value = "task-1782265" [ 1892.508720] env[62525]: _type = "Task" [ 1892.508720] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.517491] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782265, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.523724] env[62525]: DEBUG nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1892.610852] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.620070] env[62525]: DEBUG nova.network.neutron [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Instance cache missing network info. {{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1892.684593] env[62525]: DEBUG nova.compute.manager [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Received event network-vif-plugged-0972a59e-14da-461d-a9da-3400dfe9329e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1892.685100] env[62525]: DEBUG oslo_concurrency.lockutils [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] Acquiring lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.685392] env[62525]: DEBUG oslo_concurrency.lockutils [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.685626] env[62525]: DEBUG oslo_concurrency.lockutils [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.685853] env[62525]: DEBUG nova.compute.manager [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] No waiting events found dispatching network-vif-plugged-0972a59e-14da-461d-a9da-3400dfe9329e {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1892.686380] env[62525]: WARNING nova.compute.manager [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Received unexpected event network-vif-plugged-0972a59e-14da-461d-a9da-3400dfe9329e for instance with vm_state building and task_state spawning. 
[ 1892.686618] env[62525]: DEBUG nova.compute.manager [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Received event network-changed-0972a59e-14da-461d-a9da-3400dfe9329e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1892.686891] env[62525]: DEBUG nova.compute.manager [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Refreshing instance network info cache due to event network-changed-0972a59e-14da-461d-a9da-3400dfe9329e. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1892.687289] env[62525]: DEBUG oslo_concurrency.lockutils [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] Acquiring lock "refresh_cache-c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.757789] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96972f41-2d9c-4a38-9325-2112068ce8ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.766256] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0761c2-58b3-43ea-b01b-a416afdc0b76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.805850] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd2bae5-fe55-422f-a5d1-706074572325 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.814861] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d0f290-2ebc-40b7-b4f0-bb1192def420 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.835639] env[62525]: DEBUG nova.compute.provider_tree [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.883630] env[62525]: DEBUG nova.network.neutron [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Updating instance_info_cache with network_info: [{"id": "0972a59e-14da-461d-a9da-3400dfe9329e", "address": "fa:16:3e:15:7c:65", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0972a59e-14", "ovs_interfaceid": "0972a59e-14da-461d-a9da-3400dfe9329e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.019076] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782265, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.102800] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.339540] env[62525]: DEBUG nova.scheduler.client.report [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1893.386716] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "refresh_cache-c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.387058] env[62525]: DEBUG nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Instance network_info: |[{"id": "0972a59e-14da-461d-a9da-3400dfe9329e", "address": "fa:16:3e:15:7c:65", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0972a59e-14", 
"ovs_interfaceid": "0972a59e-14da-461d-a9da-3400dfe9329e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1893.387958] env[62525]: DEBUG oslo_concurrency.lockutils [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] Acquired lock "refresh_cache-c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.388173] env[62525]: DEBUG nova.network.neutron [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Refreshing network info cache for port 0972a59e-14da-461d-a9da-3400dfe9329e {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1893.389268] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:7c:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0972a59e-14da-461d-a9da-3400dfe9329e', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1893.397111] env[62525]: DEBUG oslo.service.loopingcall [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.399937] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1893.400501] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eea3c29a-e241-48fc-8dfa-423d6a4a8519 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.420395] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1893.420395] env[62525]: value = "task-1782266" [ 1893.420395] env[62525]: _type = "Task" [ 1893.420395] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.429731] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782266, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.487133] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.487345] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.487460] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1893.519316] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574945} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.519579] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c802b4f6-f34b-4d40-9bba-1b6d56643b8c/c802b4f6-f34b-4d40-9bba-1b6d56643b8c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1893.519790] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1893.520046] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29f37ca1-b610-4a11-afdb-82d68905aac3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.527318] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1893.527318] env[62525]: value = "task-1782267" [ 1893.527318] env[62525]: _type = "Task" [ 1893.527318] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.534787] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782267, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.540318] env[62525]: DEBUG nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1893.564723] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1893.564963] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1893.565150] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1893.565336] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1893.565488] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1893.565650] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1893.565851] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1893.566047] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1893.566225] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1893.566386] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1893.566570] env[62525]: DEBUG nova.virt.hardware [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1893.567442] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a561520-67fc-45af-9c3d-70ac49144db0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.575955] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce23fd3-12ad-44aa-aaa6-417f9a558ea8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.601907] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.623311] env[62525]: DEBUG nova.network.neutron [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Updated VIF entry in instance network info cache for port 0972a59e-14da-461d-a9da-3400dfe9329e. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1893.623657] env[62525]: DEBUG nova.network.neutron [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Updating instance_info_cache with network_info: [{"id": "0972a59e-14da-461d-a9da-3400dfe9329e", "address": "fa:16:3e:15:7c:65", "network": {"id": "88085410-bb66-48a6-ac07-84e17bf0d17f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1723786388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f3d5c15d37145aa84818a2ad88f307f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0972a59e-14", "ovs_interfaceid": "0972a59e-14da-461d-a9da-3400dfe9329e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.844819] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.845401] env[62525]: DEBUG nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Start building networks asynchronously for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1893.848342] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.059s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.848590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.871556] env[62525]: INFO nova.scheduler.client.report [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted allocations for instance c4e31de8-0b94-4fea-aa30-8af5608d257a [ 1893.931740] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782266, 'name': CreateVM_Task, 'duration_secs': 0.403696} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.931894] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1893.932571] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.932754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.933087] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1893.933406] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46423562-6658-4a21-a24f-c6f0359e7315 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.937830] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1893.937830] env[62525]: value = 
"session[52912505-83d3-c6c8-239b-e663f6298abd]52a0182c-a570-e40a-f8e1-4852527d2260" [ 1893.937830] env[62525]: _type = "Task" [ 1893.937830] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.945501] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a0182c-a570-e40a-f8e1-4852527d2260, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.037942] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080566} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.038254] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1894.039151] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ace38d0-4503-4acf-b21d-24f548ec87d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.063210] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] c802b4f6-f34b-4d40-9bba-1b6d56643b8c/c802b4f6-f34b-4d40-9bba-1b6d56643b8c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1894.063570] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dac23708-6f6b-4372-b544-d964cd553c99 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.085451] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1894.085451] env[62525]: value = "task-1782268" [ 1894.085451] env[62525]: _type = "Task" [ 1894.085451] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.093376] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782268, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.101826] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.126634] env[62525]: DEBUG oslo_concurrency.lockutils [req-d56dc1a4-242d-444e-9334-d5e25dcdd4fe req-96aa96b9-4482-457e-b150-4b98c245e36d service nova] Releasing lock "refresh_cache-c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.152624] env[62525]: DEBUG nova.network.neutron [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Successfully updated port: 10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1894.353153] env[62525]: DEBUG nova.compute.utils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1894.354705] env[62525]: DEBUG nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1894.354985] env[62525]: DEBUG nova.network.neutron [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1894.381299] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d3214c4-188a-4338-811e-743b4d99b766 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c4e31de8-0b94-4fea-aa30-8af5608d257a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.586s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.397238] env[62525]: DEBUG nova.policy [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6f6e065dce947b2a31313b33a08132c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3900af0b29fa40beb95a4260054c8e5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1894.447414] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a0182c-a570-e40a-f8e1-4852527d2260, 'name': SearchDatastore_Task, 'duration_secs': 0.009135} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.447735] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.447968] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1894.448212] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.448359] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.448534] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1894.448828] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13c6def9-15ff-46e8-84e3-5dc098ae8cb7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.456581] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1894.456758] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1894.457449] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d85f3eae-d2c4-40d0-a03b-8464fa2e8c86 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.462380] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1894.462380] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52afd694-a61d-606f-c832-a33dee64e546" [ 1894.462380] env[62525]: _type = "Task" [ 1894.462380] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.469505] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52afd694-a61d-606f-c832-a33dee64e546, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.594792] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782268, 'name': ReconfigVM_Task, 'duration_secs': 0.386147} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.595135] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Reconfigured VM instance instance-00000075 to attach disk [datastore1] c802b4f6-f34b-4d40-9bba-1b6d56643b8c/c802b4f6-f34b-4d40-9bba-1b6d56643b8c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1894.598478] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6edce997-3509-4af5-992f-27345722a595 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.605934] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782263, 'name': CreateVM_Task, 'duration_secs': 3.119224} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.606950] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1894.607271] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1894.607271] env[62525]: value = "task-1782270" [ 1894.607271] env[62525]: _type = "Task" [ 1894.607271] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.607863] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.608034] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.608344] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1894.608613] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e02f0e2-13bf-4d6c-8370-a03ec6100ea6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.615858] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1894.615858] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d041f7-e6b3-3d79-16bd-4e420e7c9538" [ 1894.615858] env[62525]: _type = "Task" [ 1894.615858] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.618568] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782270, 'name': Rename_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.626794] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d041f7-e6b3-3d79-16bd-4e420e7c9538, 'name': SearchDatastore_Task, 'duration_secs': 0.008909} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.627191] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.629817] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1894.629817] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.654546] env[62525]: DEBUG nova.network.neutron [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Successfully created port: 6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1894.656645] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.656781] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.656924] env[62525]: DEBUG nova.network.neutron [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1894.710380] env[62525]: DEBUG nova.compute.manager [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Received event network-vif-plugged-10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1894.710610] env[62525]: DEBUG oslo_concurrency.lockutils [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] Acquiring lock "870d7795-49ca-4201-983a-a85b590e805e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.710854] env[62525]: DEBUG oslo_concurrency.lockutils [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] Lock "870d7795-49ca-4201-983a-a85b590e805e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.711090] env[62525]: DEBUG oslo_concurrency.lockutils [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] Lock "870d7795-49ca-4201-983a-a85b590e805e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.711214] env[62525]: DEBUG nova.compute.manager [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] No waiting events found dispatching network-vif-plugged-10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1894.711345] env[62525]: WARNING nova.compute.manager [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Received unexpected event network-vif-plugged-10f619d4-6192-4474-84e8-35cecf4327f7 for instance with vm_state building and task_state spawning. [ 1894.711502] env[62525]: DEBUG nova.compute.manager [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Received event network-changed-10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1894.711654] env[62525]: DEBUG nova.compute.manager [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Refreshing instance network info cache due to event network-changed-10f619d4-6192-4474-84e8-35cecf4327f7. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1894.711849] env[62525]: DEBUG oslo_concurrency.lockutils [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] Acquiring lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.858246] env[62525]: DEBUG nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Start building block device mappings for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1894.975416] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52afd694-a61d-606f-c832-a33dee64e546, 'name': SearchDatastore_Task, 'duration_secs': 0.017625} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.976297] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d53817d-6a9b-4061-a7e3-528c25f03a96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.981864] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1894.981864] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]526b1629-2c0c-319a-5a9a-fed2d60995da" [ 1894.981864] env[62525]: _type = "Task" [ 1894.981864] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.989661] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526b1629-2c0c-319a-5a9a-fed2d60995da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.118658] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782270, 'name': Rename_Task, 'duration_secs': 0.150195} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.118963] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1895.119244] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8af88703-211f-45b3-ba11-62d267d1f00c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.126084] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1895.126084] env[62525]: value = "task-1782271" [ 1895.126084] env[62525]: _type = "Task" [ 1895.126084] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.133570] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782271, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.189426] env[62525]: DEBUG nova.network.neutron [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1895.320764] env[62525]: DEBUG nova.network.neutron [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.491757] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]526b1629-2c0c-319a-5a9a-fed2d60995da, 'name': SearchDatastore_Task, 'duration_secs': 0.008866} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.492053] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.492301] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c008fa1d-7cd6-4f8c-9459-4a47f342eeaf/c008fa1d-7cd6-4f8c-9459-4a47f342eeaf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1895.492604] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.492795] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1895.493016] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9ef8e0b-3ba2-4240-b88a-be33bddabd9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.494898] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a9a331a-90f4-4c1d-972c-69c3af438943 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.502641] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1895.502641] env[62525]: value = "task-1782272" [ 1895.502641] env[62525]: _type = "Task" [ 1895.502641] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.506202] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1895.506381] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1895.507454] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be39fbe3-c68b-4443-bf56-53ac1a09aba3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.513175] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.516176] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1895.516176] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52f6b6e3-e500-4669-127b-9aa637f793bf" [ 1895.516176] env[62525]: _type = "Task" [ 1895.516176] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.524095] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f6b6e3-e500-4669-127b-9aa637f793bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.635909] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782271, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.823262] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.823682] env[62525]: DEBUG nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Instance network_info: |[{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1895.824017] env[62525]: DEBUG oslo_concurrency.lockutils [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] Acquired lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.824206] env[62525]: DEBUG nova.network.neutron [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Refreshing network info cache for port 10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1895.825468] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:48:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e83154-c0d2-4d3d-b95e-3cd5ba336257', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10f619d4-6192-4474-84e8-35cecf4327f7', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1895.833290] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Creating folder: Project (6dbd20742b0f42d5ac04268223bfe911). Parent ref: group-v369553. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1895.836521] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2c5c336-3141-473d-97c2-a3594bda4482 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.847690] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Created folder: Project (6dbd20742b0f42d5ac04268223bfe911) in parent group-v369553. [ 1895.847881] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating folder: Instances. Parent ref: group-v369866. {{(pid=62525) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1895.848130] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6727f17c-bfa3-44cd-9b44-e637e5c4b16c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.857571] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Created folder: Instances in parent group-v369866. [ 1895.857800] env[62525]: DEBUG oslo.service.loopingcall [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1895.857985] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1895.858212] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dabd4c2b-c8c3-4c5a-8409-7d02cac5ae49 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.876876] env[62525]: DEBUG nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1895.884964] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1895.884964] env[62525]: value = "task-1782275" [ 1895.884964] env[62525]: _type = "Task" [ 1895.884964] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.896393] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782275, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.906257] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1895.906528] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1895.906690] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1895.906873] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1895.907027] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1895.907180] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1895.907386] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1895.907674] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1895.907757] env[62525]: DEBUG 
nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1895.907854] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1895.908029] env[62525]: DEBUG nova.virt.hardware [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1895.908818] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8d8b98-d6d9-4cdb-81a8-a420d1af5a96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.916933] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6240293d-aa90-4372-9872-86eee6762653 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.013147] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782272, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.029104] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52f6b6e3-e500-4669-127b-9aa637f793bf, 'name': SearchDatastore_Task, 'duration_secs': 0.019364} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.030142] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c1937d8-6797-44e3-b5d5-4f0bb2ea012b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.036991] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1896.036991] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527e49bc-d34a-bfd9-924d-f62e752d4a21" [ 1896.036991] env[62525]: _type = "Task" [ 1896.036991] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.046383] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527e49bc-d34a-bfd9-924d-f62e752d4a21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.125329] env[62525]: DEBUG nova.compute.manager [req-a6bc8579-484c-426d-9da4-3cce45219298 req-07c61c1d-ecc6-47af-aeeb-9acb6047a627 service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Received event network-vif-plugged-6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.125578] env[62525]: DEBUG oslo_concurrency.lockutils [req-a6bc8579-484c-426d-9da4-3cce45219298 req-07c61c1d-ecc6-47af-aeeb-9acb6047a627 service nova] Acquiring lock "abd538d5-f433-4896-9871-5cdef303cda0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.125759] env[62525]: DEBUG oslo_concurrency.lockutils [req-a6bc8579-484c-426d-9da4-3cce45219298 req-07c61c1d-ecc6-47af-aeeb-9acb6047a627 service nova] Lock "abd538d5-f433-4896-9871-5cdef303cda0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.125992] env[62525]: DEBUG oslo_concurrency.lockutils [req-a6bc8579-484c-426d-9da4-3cce45219298 req-07c61c1d-ecc6-47af-aeeb-9acb6047a627 service nova] Lock "abd538d5-f433-4896-9871-5cdef303cda0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.126335] env[62525]: DEBUG nova.compute.manager [req-a6bc8579-484c-426d-9da4-3cce45219298 req-07c61c1d-ecc6-47af-aeeb-9acb6047a627 service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] No waiting events found dispatching network-vif-plugged-6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1896.126569] env[62525]: WARNING nova.compute.manager [req-a6bc8579-484c-426d-9da4-3cce45219298 req-07c61c1d-ecc6-47af-aeeb-9acb6047a627 service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Received unexpected event network-vif-plugged-6d956a60-0763-439d-9f38-a05ab94cca9f for instance with vm_state building and task_state spawning. [ 1896.138043] env[62525]: DEBUG oslo_vmware.api [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782271, 'name': PowerOnVM_Task, 'duration_secs': 0.534989} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.138423] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1896.138743] env[62525]: INFO nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Took 9.34 seconds to spawn the instance on the hypervisor. 
[ 1896.138930] env[62525]: DEBUG nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1896.139965] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04763c56-9054-43e4-82cb-0c2a99aa62d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.151964] env[62525]: DEBUG nova.network.neutron [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Successfully updated port: 6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1896.166346] env[62525]: DEBUG nova.network.neutron [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updated VIF entry in instance network info cache for port 10f619d4-6192-4474-84e8-35cecf4327f7. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1896.166813] env[62525]: DEBUG nova.network.neutron [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.395665] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782275, 'name': CreateVM_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.514127] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.764092} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.514504] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] c008fa1d-7cd6-4f8c-9459-4a47f342eeaf/c008fa1d-7cd6-4f8c-9459-4a47f342eeaf.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1896.514589] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1896.514874] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-caea2dc3-7d58-421a-94dd-b77f2e8c9565 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.523954] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1896.523954] env[62525]: value = "task-1782277" [ 1896.523954] env[62525]: _type = "Task" [ 1896.523954] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.534650] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782277, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.550120] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527e49bc-d34a-bfd9-924d-f62e752d4a21, 'name': SearchDatastore_Task, 'duration_secs': 0.104927} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.550455] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.550766] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1896.551083] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-447ad2fd-1a9f-4272-bf27-3ecdabe23126 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.558208] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1896.558208] env[62525]: value = "task-1782278" [ 1896.558208] env[62525]: _type = "Task" [ 1896.558208] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.568346] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.663196] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.663438] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.663526] env[62525]: DEBUG nova.network.neutron [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1896.666733] env[62525]: INFO nova.compute.manager [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Took 17.46 seconds to build instance. [ 1896.669341] env[62525]: DEBUG oslo_concurrency.lockutils [req-20ef1f31-075d-48d2-85c8-1933b31482af req-8f0d1dd1-ea36-4907-b4e4-ecd0d8a8806c service nova] Releasing lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.740432] env[62525]: DEBUG nova.compute.manager [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Received event network-changed-6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.740757] env[62525]: DEBUG nova.compute.manager [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Refreshing instance network info cache due to event network-changed-6d956a60-0763-439d-9f38-a05ab94cca9f. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1896.740959] env[62525]: DEBUG oslo_concurrency.lockutils [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] Acquiring lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.897672] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782275, 'name': CreateVM_Task} progress is 99%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.008611] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1897.008611] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.008897] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.008897] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.009148] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.009321] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.009532] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.009709] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1897.009909] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.032677] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782277, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07834} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.032943] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1897.033714] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f499254-541d-45cb-bbb2-3b49da6fb6a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.055452] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] c008fa1d-7cd6-4f8c-9459-4a47f342eeaf/c008fa1d-7cd6-4f8c-9459-4a47f342eeaf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1897.055969] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56efa20a-207d-4856-af0e-cbf442d8d069 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.082575] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.084030] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1897.084030] env[62525]: value = "task-1782279" [ 1897.084030] env[62525]: _type = "Task" [ 1897.084030] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.092907] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782279, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.169595] env[62525]: DEBUG oslo_concurrency.lockutils [None req-016e32bb-036c-4c72-90a6-9e8847fd8614 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.966s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.198945] env[62525]: DEBUG nova.network.neutron [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1897.362158] env[62525]: DEBUG nova.network.neutron [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updating instance_info_cache with network_info: [{"id": "6d956a60-0763-439d-9f38-a05ab94cca9f", "address": "fa:16:3e:fe:0f:d4", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d956a60-07", "ovs_interfaceid": "6d956a60-0763-439d-9f38-a05ab94cca9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.397466] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782275, 'name': CreateVM_Task, 'duration_secs': 1.391199} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.397466] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1897.397860] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.398124] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.398364] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1897.398615] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc51678b-32a1-4201-b1eb-ea390f2f259a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.403339] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1897.403339] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52fd2755-11d7-311e-8763-77e5b82633d1" [ 1897.403339] env[62525]: _type = "Task" [ 1897.403339] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.414115] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd2755-11d7-311e-8763-77e5b82633d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.513700] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.513700] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.513700] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.513939] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1897.514777] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7dfa08-bc2d-449c-b2e6-0251bad5bef3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.523274] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bbccb7-688e-44ef-913f-769f27d099cb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.537666] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db38d9b-01f2-4022-9189-cde6b9785697 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.545195] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df988abb-1f95-4180-bba2-77167ab5d027 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.574651] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179847MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1897.574809] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.575015] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.580367] env[62525]: DEBUG oslo_concurrency.lockutils [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.580621] env[62525]: DEBUG oslo_concurrency.lockutils [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.580801] env[62525]: DEBUG nova.compute.manager [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1897.581555] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9505a30-3a1d-4faf-b22c-26624c00f980 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.591449] env[62525]: DEBUG nova.compute.manager [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1897.592121] env[62525]: DEBUG nova.objects.instance [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'flavor' on Instance uuid c802b4f6-f34b-4d40-9bba-1b6d56643b8c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1897.594032] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.599309] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782279, 'name': ReconfigVM_Task, 'duration_secs': 0.373523} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.599562] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Reconfigured VM instance instance-00000077 to attach disk [datastore1] c008fa1d-7cd6-4f8c-9459-4a47f342eeaf/c008fa1d-7cd6-4f8c-9459-4a47f342eeaf.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1897.600200] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69e9a5ff-442a-4950-8000-79e3bee277e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.606915] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1897.606915] env[62525]: value = "task-1782280" [ 1897.606915] env[62525]: _type = "Task" [ 1897.606915] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.617011] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782280, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.864773] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.865251] env[62525]: DEBUG nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Instance network_info: |[{"id": "6d956a60-0763-439d-9f38-a05ab94cca9f", "address": "fa:16:3e:fe:0f:d4", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d956a60-07", "ovs_interfaceid": "6d956a60-0763-439d-9f38-a05ab94cca9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1897.865642] env[62525]: DEBUG oslo_concurrency.lockutils [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] Acquired lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.865862] env[62525]: DEBUG nova.network.neutron [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Refreshing network info cache for port 6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1897.867126] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:0f:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d956a60-0763-439d-9f38-a05ab94cca9f', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1897.875543] env[62525]: DEBUG oslo.service.loopingcall [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.876476] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1897.876703] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fffee06-2f6e-42b3-b4b0-c1a2e5509f76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.900220] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1897.900220] env[62525]: value = "task-1782281" [ 1897.900220] env[62525]: _type = "Task" [ 1897.900220] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.910976] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782281, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.916644] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52fd2755-11d7-311e-8763-77e5b82633d1, 'name': SearchDatastore_Task, 'duration_secs': 0.0226} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.916965] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.917220] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1897.917466] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.917619] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.917796] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1897.918053] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75ed2dca-c7b9-467f-aad3-e024bee6062e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.089525] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.097226] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1898.097525] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9a54164-632e-47a7-af3e-e5a01dcf5061 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.105769] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1898.105769] env[62525]: value = "task-1782282" [ 1898.105769] env[62525]: _type = "Task" [ 1898.105769] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.117974] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.123203] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782280, 'name': Rename_Task, 'duration_secs': 0.15102} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.123556] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1898.123899] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-674e7fa7-5974-4c23-9300-c5bc0fa44cb7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.131238] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1898.131238] env[62525]: value = "task-1782283" [ 1898.131238] env[62525]: _type = "Task" [ 1898.131238] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.139299] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782283, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.414674] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782281, 'name': CreateVM_Task, 'duration_secs': 0.358866} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.414874] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1898.415627] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.415833] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.416203] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1898.416474] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef4360e7-7167-47d0-a79f-2f89971d8f49 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.421497] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1898.421497] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5281d664-a864-bb41-b6c2-aad00e746084" [ 1898.421497] env[62525]: _type = "Task" [ 1898.421497] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.430145] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5281d664-a864-bb41-b6c2-aad00e746084, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.569923] env[62525]: DEBUG nova.network.neutron [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updated VIF entry in instance network info cache for port 6d956a60-0763-439d-9f38-a05ab94cca9f. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1898.571025] env[62525]: DEBUG nova.network.neutron [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updating instance_info_cache with network_info: [{"id": "6d956a60-0763-439d-9f38-a05ab94cca9f", "address": "fa:16:3e:fe:0f:d4", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d956a60-07", "ovs_interfaceid": "6d956a60-0763-439d-9f38-a05ab94cca9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.584310] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.607316] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.607476] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 130a3015-6caf-4374-a35f-9dd49bb8b3bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.607601] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance fd078815-58e6-4a3a-9da8-dd5324ea76b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.607719] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.607833] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c802b4f6-f34b-4d40-9bba-1b6d56643b8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.609364] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.609364] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance c008fa1d-7cd6-4f8c-9459-4a47f342eeaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.609364] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 870d7795-49ca-4201-983a-a85b590e805e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.609364] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance abd538d5-f433-4896-9871-5cdef303cda0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1898.609364] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1898.609364] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1898.620381] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.641301] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782283, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.723012] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3671872-10f1-4a39-a9ea-3a82bdd9f922 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.731071] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ed70e1-8774-4f70-a46e-e90e324de0aa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.761843] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1650d1-d5ab-4968-86a6-9d99ea90511a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.768779] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253998e7-2a7e-463d-bb10-1a7060a306c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.781332] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.931189] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5281d664-a864-bb41-b6c2-aad00e746084, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.072746] env[62525]: DEBUG oslo_concurrency.lockutils [req-b206d2fd-d45f-46ac-82f9-4b0ef12d4494 req-793892df-2a0a-4d08-a3dd-cacad6e3aa4f service nova] Releasing lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.084498] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.118902] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.139813] env[62525]: DEBUG oslo_vmware.api [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782283, 'name': PowerOnVM_Task, 'duration_secs': 0.511164} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.140075] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1899.140282] env[62525]: INFO nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Took 7.87 seconds to spawn the instance on the hypervisor. [ 1899.140465] env[62525]: DEBUG nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1899.141187] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06154a07-e3a7-490f-80af-c2a0cd147783 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.284803] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1899.432545] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5281d664-a864-bb41-b6c2-aad00e746084, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.585180] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.620363] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.660048] env[62525]: INFO nova.compute.manager [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Took 13.19 seconds to build instance. 
[ 1899.789847] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1899.790088] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.215s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.811469] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1899.811668] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1899.812454] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ab2397a-d21d-4e1e-94b7-e8e91b89a8f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.819760] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1899.819760] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52058fb1-2752-8233-6429-3dedda34a7f1" [ 1899.819760] env[62525]: _type = "Task" [ 1899.819760] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.827316] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52058fb1-2752-8233-6429-3dedda34a7f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.933768] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5281d664-a864-bb41-b6c2-aad00e746084, 'name': SearchDatastore_Task, 'duration_secs': 1.39575} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.934116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.934361] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1899.934588] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.086617] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.121549] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.162411] env[62525]: DEBUG oslo_concurrency.lockutils [None req-37340fc7-5a20-455b-8c7c-af7c77b8f1ca tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.732s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.333286] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52058fb1-2752-8233-6429-3dedda34a7f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009867} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.334142] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e556aac7-8ceb-4955-8652-de7ba3da8cad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.339853] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1900.339853] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]522c7e8c-0752-c157-c76b-9894a95f523e" [ 1900.339853] env[62525]: _type = "Task" [ 1900.339853] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.348216] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522c7e8c-0752-c157-c76b-9894a95f523e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.587368] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.621536] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782282, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.719014] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.719339] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.719596] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.719849] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.720058] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.722341] env[62525]: INFO nova.compute.manager [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Terminating instance [ 1900.724247] env[62525]: DEBUG nova.compute.manager [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1900.724440] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1900.725272] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154e97b8-b344-45e9-889c-a0edc0302fbc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.733590] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1900.734170] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8ebc756-5e8e-423e-92f6-2891f0db570c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.740637] env[62525]: DEBUG oslo_vmware.api [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1900.740637] env[62525]: value = "task-1782286" [ 1900.740637] env[62525]: _type = "Task" [ 1900.740637] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.748379] env[62525]: DEBUG oslo_vmware.api [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782286, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.851851] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]522c7e8c-0752-c157-c76b-9894a95f523e, 'name': SearchDatastore_Task, 'duration_secs': 0.16711} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.852216] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1900.852564] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 870d7795-49ca-4201-983a-a85b590e805e/870d7795-49ca-4201-983a-a85b590e805e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1900.852876] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.853142] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1900.853415] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9746572a-f22e-4150-a1de-f103ecd7e882 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.855962] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fddc2d4a-99ac-413c-a4fb-17c0d00bb5f0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.862248] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1900.862248] env[62525]: value = "task-1782287" [ 1900.862248] env[62525]: _type = "Task" [ 1900.862248] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.866104] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1900.866280] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1900.867352] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4dc4730-bf2e-4aa4-a2c8-c89b8da1cdfb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.872335] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782287, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.875665] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1900.875665] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e5f8e0-1a1f-46bb-c96d-5d34037aae75" [ 1900.875665] env[62525]: _type = "Task" [ 1900.875665] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.884343] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e5f8e0-1a1f-46bb-c96d-5d34037aae75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.089091] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782278, 'name': CopyVirtualDisk_Task, 'duration_secs': 4.218907} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.089353] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1901.089575] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1901.089867] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c807e18-58a2-44a8-b291-24ffe2f292db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.096624] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1901.096624] env[62525]: value = "task-1782288" [ 1901.096624] env[62525]: _type = "Task" [ 1901.096624] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.106554] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782288, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.121831] env[62525]: DEBUG oslo_vmware.api [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782282, 'name': PowerOffVM_Task, 'duration_secs': 2.743392} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.122099] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1901.122357] env[62525]: DEBUG nova.compute.manager [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1901.123118] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269adebd-9d93-44c7-8e37-c41816171f11 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.250810] env[62525]: DEBUG oslo_vmware.api [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782286, 'name': PowerOffVM_Task, 'duration_secs': 0.18791} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.251093] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1901.251311] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1901.251574] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83fdc172-50f3-412f-9483-8ea3874ac01e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.372714] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782287, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.384649] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e5f8e0-1a1f-46bb-c96d-5d34037aae75, 'name': SearchDatastore_Task, 'duration_secs': 0.008556} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.385468] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad189abe-87a0-4cc0-bc0a-3064a76c2f2d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.390518] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1901.390518] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5216e79d-33cb-2ae2-d806-09cc7b55d0da" [ 1901.390518] env[62525]: _type = "Task" [ 1901.390518] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.398448] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5216e79d-33cb-2ae2-d806-09cc7b55d0da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.606819] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782288, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066194} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.608683] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1901.608683] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b850586d-1e76-483c-b602-0501458a54ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.631413] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1901.633617] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f509dd10-df1c-40f3-a069-0386a9072d79 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.649022] env[62525]: DEBUG oslo_concurrency.lockutils [None req-10ac890f-9b30-4e60-b928-ed7d82aee668 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 4.068s 
{{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.655919] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1901.655919] env[62525]: value = "task-1782290" [ 1901.655919] env[62525]: _type = "Task" [ 1901.655919] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.665394] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782290, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.873012] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782287, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.938206} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.873305] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 870d7795-49ca-4201-983a-a85b590e805e/870d7795-49ca-4201-983a-a85b590e805e.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1901.873582] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1901.873778] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83594ffb-5372-4e08-8bf1-1da9f4edaeef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.880425] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1901.880425] env[62525]: value = "task-1782291" [ 1901.880425] env[62525]: _type = "Task" [ 1901.880425] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.889071] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782291, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.898986] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5216e79d-33cb-2ae2-d806-09cc7b55d0da, 'name': SearchDatastore_Task, 'duration_secs': 0.126114} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.899206] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.899455] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] abd538d5-f433-4896-9871-5cdef303cda0/abd538d5-f433-4896-9871-5cdef303cda0.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1901.899704] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-720d0483-e87d-4054-a24c-41da7bac5d0f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.905906] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1901.905906] env[62525]: value = "task-1782292" [ 1901.905906] env[62525]: _type = "Task" [ 1901.905906] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.914452] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.166880] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782290, 'name': ReconfigVM_Task, 'duration_secs': 0.311019} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.167206] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1902.167857] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d381d99-3b50-4fa8-968f-7813934819fc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.175437] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1902.175437] env[62525]: value = "task-1782293" [ 1902.175437] env[62525]: _type = "Task" [ 1902.175437] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.183603] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782293, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.393982] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078626} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.394413] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1902.395500] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5753f5-b379-42bd-b92a-9fbbd406221b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.427821] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 870d7795-49ca-4201-983a-a85b590e805e/870d7795-49ca-4201-983a-a85b590e805e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1902.432954] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb19f2ca-0725-4309-b5f4-e042dc1eec5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.458073] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782292, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.459521] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1902.459521] env[62525]: value = "task-1782294" [ 1902.459521] env[62525]: _type = "Task" [ 1902.459521] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.467712] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782294, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.473259] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.473500] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.473724] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.473917] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.474100] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.476370] env[62525]: INFO nova.compute.manager [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Terminating instance [ 1902.478278] env[62525]: DEBUG nova.compute.manager [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1902.478480] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1902.479538] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdfa242-9ecf-4430-a930-7556219b36b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.486771] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1902.487147] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26f25951-b408-4c61-9c9a-09bcac87cf47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.685994] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782293, 'name': Rename_Task, 'duration_secs': 0.153451} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.685994] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1902.686382] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e424d199-3e0b-49bf-84a6-4aeb7e11b20f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.692962] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1902.692962] env[62525]: value = "task-1782297" [ 1902.692962] env[62525]: _type = "Task" [ 1902.692962] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.699957] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782297, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.925011] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61917} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.925325] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] abd538d5-f433-4896-9871-5cdef303cda0/abd538d5-f433-4896-9871-5cdef303cda0.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1902.925547] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1902.925800] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bf1696a-c1ab-4cca-aed5-ac9850a6f750 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.931988] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1902.931988] env[62525]: value = "task-1782298" [ 1902.931988] env[62525]: _type = "Task" [ 1902.931988] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.939022] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.969821] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782294, 'name': ReconfigVM_Task, 'duration_secs': 0.292988} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.970135] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 870d7795-49ca-4201-983a-a85b590e805e/870d7795-49ca-4201-983a-a85b590e805e.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1902.970752] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ebe6837-85fd-423e-b43e-68a20a31966e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.976406] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1902.976406] env[62525]: value = "task-1782299" [ 1902.976406] env[62525]: _type = "Task" [ 1902.976406] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.986308] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782299, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.991730] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1902.991959] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1902.992212] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleting the datastore file [datastore1] c802b4f6-f34b-4d40-9bba-1b6d56643b8c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1902.992942] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6d91391-a398-4521-a5e2-e29eb029a14d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.994765] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1902.994998] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 
tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1902.995628] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleting the datastore file [datastore1] c008fa1d-7cd6-4f8c-9459-4a47f342eeaf {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1902.995628] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88323f09-5d75-442b-aad4-551b34cd4596 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.002128] env[62525]: DEBUG oslo_vmware.api [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1903.002128] env[62525]: value = "task-1782300" [ 1903.002128] env[62525]: _type = "Task" [ 1903.002128] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.003468] env[62525]: DEBUG oslo_vmware.api [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for the task: (returnval){ [ 1903.003468] env[62525]: value = "task-1782301" [ 1903.003468] env[62525]: _type = "Task" [ 1903.003468] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.015896] env[62525]: DEBUG oslo_vmware.api [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.018607] env[62525]: DEBUG oslo_vmware.api [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782300, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.203372] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782297, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.441944] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072885} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.442256] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1903.443068] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9472172b-1d8b-41f6-8193-e2f2f10c8b61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.464880] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] abd538d5-f433-4896-9871-5cdef303cda0/abd538d5-f433-4896-9871-5cdef303cda0.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1903.465145] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4bc7bd7-fddd-4e59-9893-ad07c480c646 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.486263] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782299, 'name': Rename_Task, 'duration_secs': 0.187506} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.486539] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1903.487610] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b55e981-5199-4116-add1-5b47882dc815 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.488983] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1903.488983] env[62525]: value = "task-1782302" [ 1903.488983] env[62525]: _type = "Task" [ 1903.488983] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.492957] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1903.492957] env[62525]: value = "task-1782303" [ 1903.492957] env[62525]: _type = "Task" [ 1903.492957] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.498921] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782302, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.501624] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.512422] env[62525]: DEBUG oslo_vmware.api [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197292} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.513020] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1903.513223] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1903.513401] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1903.513572] env[62525]: INFO nova.compute.manager [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Took 1.04 seconds to destroy the instance on the hypervisor. [ 1903.513811] env[62525]: DEBUG oslo.service.loopingcall [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.514023] env[62525]: DEBUG nova.compute.manager [-] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1903.514104] env[62525]: DEBUG nova.network.neutron [-] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1903.518533] env[62525]: DEBUG oslo_vmware.api [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Task: {'id': task-1782301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207738} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.519047] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1903.519240] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1903.519418] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1903.519605] env[62525]: INFO nova.compute.manager [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Took 2.80 seconds to destroy the instance on the hypervisor. [ 1903.519843] env[62525]: DEBUG oslo.service.loopingcall [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.520062] env[62525]: DEBUG nova.compute.manager [-] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1903.520133] env[62525]: DEBUG nova.network.neutron [-] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1903.705512] env[62525]: DEBUG oslo_vmware.api [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782297, 'name': PowerOnVM_Task, 'duration_secs': 0.638878} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.705818] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1903.706017] env[62525]: INFO nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Took 14.67 seconds to spawn the instance on the hypervisor. [ 1903.706511] env[62525]: DEBUG nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1903.707313] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5270c2f6-665e-4aec-9c70-877441f81a30 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.945585] env[62525]: DEBUG nova.compute.manager [req-66915851-103e-4954-aa85-426d5daaca8b req-7a2d8acb-c3c6-4140-9a90-3c9c9cda1b6d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Received event network-vif-deleted-4b806ffd-ef3b-46ce-b9af-cd4758d29d4c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1903.945741] env[62525]: INFO nova.compute.manager [req-66915851-103e-4954-aa85-426d5daaca8b req-7a2d8acb-c3c6-4140-9a90-3c9c9cda1b6d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Neutron deleted interface 4b806ffd-ef3b-46ce-b9af-cd4758d29d4c; detaching it from the instance and deleting it from the info cache [ 1903.945833] env[62525]: DEBUG nova.network.neutron [req-66915851-103e-4954-aa85-426d5daaca8b req-7a2d8acb-c3c6-4140-9a90-3c9c9cda1b6d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.005778] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782303, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.009858] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782302, 'name': ReconfigVM_Task, 'duration_secs': 0.346439} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.010235] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Reconfigured VM instance instance-00000079 to attach disk [datastore1] abd538d5-f433-4896-9871-5cdef303cda0/abd538d5-f433-4896-9871-5cdef303cda0.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1904.010992] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-797d0f6e-0a4a-4f24-ba66-489a454975a0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.019348] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1904.019348] env[62525]: value = "task-1782304" [ 1904.019348] env[62525]: _type = "Task" [ 1904.019348] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.028068] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782304, 'name': Rename_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.225021] env[62525]: INFO nova.compute.manager [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Took 19.71 seconds to build instance. [ 1904.421799] env[62525]: DEBUG nova.network.neutron [-] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.426403] env[62525]: DEBUG nova.network.neutron [-] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.448292] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-831ee48c-8b4c-455a-9295-9118be5ccf6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.457998] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1e5564-b2ae-43d2-baf5-d0cfcff7ce11 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.489627] env[62525]: DEBUG nova.compute.manager [req-66915851-103e-4954-aa85-426d5daaca8b req-7a2d8acb-c3c6-4140-9a90-3c9c9cda1b6d service nova] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Detach interface failed, port_id=4b806ffd-ef3b-46ce-b9af-cd4758d29d4c, reason: Instance c802b4f6-f34b-4d40-9bba-1b6d56643b8c could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1904.507469] env[62525]: DEBUG oslo_vmware.api [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782303, 'name': PowerOnVM_Task, 'duration_secs': 0.900104} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.507786] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1904.507992] env[62525]: INFO nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Took 10.97 seconds to spawn the instance on the hypervisor. [ 1904.508189] env[62525]: DEBUG nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1904.509104] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77e337b-0760-4fa4-b7da-abc7e3a429b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.533151] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782304, 'name': Rename_Task, 'duration_secs': 0.185047} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.533151] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1904.533151] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7677d74-34ff-4cf2-b258-5a710f9e38ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.537839] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1904.537839] env[62525]: value = "task-1782305" [ 1904.537839] env[62525]: _type = "Task" [ 1904.537839] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.546309] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782305, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.727396] env[62525]: DEBUG oslo_concurrency.lockutils [None req-abb33097-8bdd-49a1-9ec1-cce24135411a tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.221s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.924108] env[62525]: INFO nova.compute.manager [-] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Took 1.41 seconds to deallocate network for instance. [ 1904.928591] env[62525]: INFO nova.compute.manager [-] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Took 1.41 seconds to deallocate network for instance. [ 1905.026185] env[62525]: INFO nova.compute.manager [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Took 18.29 seconds to build instance. [ 1905.049632] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782305, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.430905] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.431205] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.431481] env[62525]: DEBUG nova.objects.instance [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'resources' on Instance uuid c802b4f6-f34b-4d40-9bba-1b6d56643b8c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1905.436027] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.535048] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a46d8a43-94db-49fa-b7fa-a8939603e7b9 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.802s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.549174] env[62525]: DEBUG oslo_vmware.api [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782305, 'name': PowerOnVM_Task, 'duration_secs': 0.640187} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.549458] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1905.549654] env[62525]: INFO nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Took 9.67 seconds to spawn the instance on the hypervisor. [ 1905.549867] env[62525]: DEBUG nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1905.550763] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94846005-bf44-4626-97b4-9d76d60d64dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.877479] env[62525]: DEBUG nova.compute.manager [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Received event network-changed-10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1905.877835] env[62525]: DEBUG nova.compute.manager [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Refreshing instance network info cache due to event network-changed-10f619d4-6192-4474-84e8-35cecf4327f7. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1905.877835] env[62525]: DEBUG oslo_concurrency.lockutils [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] Acquiring lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.877911] env[62525]: DEBUG oslo_concurrency.lockutils [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] Acquired lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.878023] env[62525]: DEBUG nova.network.neutron [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Refreshing network info cache for port 10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1905.976998] env[62525]: DEBUG nova.compute.manager [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Received event network-vif-deleted-0972a59e-14da-461d-a9da-3400dfe9329e {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1905.977285] env[62525]: DEBUG nova.compute.manager [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1905.977489] env[62525]: DEBUG nova.compute.manager [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing instance network info cache due to event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1905.977658] env[62525]: DEBUG oslo_concurrency.lockutils [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.977814] env[62525]: DEBUG oslo_concurrency.lockutils [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.977982] env[62525]: DEBUG nova.network.neutron [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1906.068627] env[62525]: INFO nova.compute.manager [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Took 18.89 seconds to build instance. 
[ 1906.088916] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3df4e84-d29c-4881-bb1e-33bed34f5402 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.097467] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d569df3-0f8e-444e-88f6-844fba962055 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.130934] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcebd194-d6e0-4502-9559-478d898ed4fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.138688] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8265316-3bec-4adf-b1b5-a9e20c80d0d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.153145] env[62525]: DEBUG nova.compute.provider_tree [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1906.401286] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.401513] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.574856] env[62525]: DEBUG oslo_concurrency.lockutils [None req-29c2e975-01d1-455f-86d2-f38e49d6da42 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "abd538d5-f433-4896-9871-5cdef303cda0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.403s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.656580] env[62525]: DEBUG nova.scheduler.client.report [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1906.704360] env[62525]: DEBUG nova.network.neutron [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updated VIF entry in instance network info cache for port 10f619d4-6192-4474-84e8-35cecf4327f7. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1906.704843] env[62525]: DEBUG nova.network.neutron [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.784859] env[62525]: DEBUG nova.network.neutron [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updated VIF entry in instance network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1906.785256] env[62525]: DEBUG nova.network.neutron [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.904080] env[62525]: DEBUG nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1907.163071] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.165290] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.731s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.165544] env[62525]: DEBUG nova.objects.instance [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lazy-loading 'resources' on Instance uuid c008fa1d-7cd6-4f8c-9459-4a47f342eeaf {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1907.185524] env[62525]: INFO nova.scheduler.client.report [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted allocations for instance c802b4f6-f34b-4d40-9bba-1b6d56643b8c [ 1907.208785] env[62525]: DEBUG oslo_concurrency.lockutils [req-d1c53aaf-8928-4e72-ad69-f17872bcf935 req-d128acbf-c20a-4666-a2ca-5c2851217766 service nova] Releasing lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.288332] env[62525]: DEBUG oslo_concurrency.lockutils [req-2b01f93a-6260-409b-9eb5-6d385586c670 req-46b5f33e-ff55-4f80-b492-35877ed547ed service nova] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.422642] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.693404] env[62525]: DEBUG oslo_concurrency.lockutils [None req-35eae0e1-228b-4976-a45d-c99466c9246c tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "c802b4f6-f34b-4d40-9bba-1b6d56643b8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.220s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.793531] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b1a904-ce91-42e9-9e72-db62232587c7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.801401] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4757348f-d3a0-44e2-905f-2419fa968511 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.833359] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ec5052-9e2e-4c70-9c35-8e96a2c44f9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.841031] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bbbd2b-8bab-444e-b520-f5b688723b75 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.854810] env[62525]: DEBUG nova.compute.provider_tree [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1907.901260] env[62525]: DEBUG nova.compute.manager [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Received event network-changed-6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1907.901448] env[62525]: DEBUG nova.compute.manager [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Refreshing instance network info cache due to event network-changed-6d956a60-0763-439d-9f38-a05ab94cca9f. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1907.901619] env[62525]: DEBUG oslo_concurrency.lockutils [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] Acquiring lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.901765] env[62525]: DEBUG oslo_concurrency.lockutils [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] Acquired lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.901921] env[62525]: DEBUG nova.network.neutron [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Refreshing network info cache for port 6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1908.358494] env[62525]: DEBUG nova.scheduler.client.report [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1908.547501] 
env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.547889] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.548249] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.548607] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.548930] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.555382] env[62525]: INFO nova.compute.manager [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Terminating instance [ 1908.558194] env[62525]: DEBUG nova.compute.manager [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1908.558504] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1908.559709] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8b3aa2-47c9-4959-972e-ecfa37198ff0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.570886] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1908.571257] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc7480ec-e708-4e0f-b7f3-5a4e20f33aab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.579313] env[62525]: DEBUG oslo_vmware.api [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1908.579313] env[62525]: value = "task-1782306" [ 1908.579313] env[62525]: _type = "Task" [ 1908.579313] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.592523] env[62525]: DEBUG oslo_vmware.api [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782306, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.630943] env[62525]: DEBUG nova.network.neutron [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updated VIF entry in instance network info cache for port 6d956a60-0763-439d-9f38-a05ab94cca9f. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1908.631473] env[62525]: DEBUG nova.network.neutron [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updating instance_info_cache with network_info: [{"id": "6d956a60-0763-439d-9f38-a05ab94cca9f", "address": "fa:16:3e:fe:0f:d4", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d956a60-07", "ovs_interfaceid": "6d956a60-0763-439d-9f38-a05ab94cca9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.863642] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.698s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.865920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.443s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.868106] env[62525]: INFO nova.compute.claims [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1908.920562] env[62525]: INFO nova.scheduler.client.report [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Deleted allocations for instance c008fa1d-7cd6-4f8c-9459-4a47f342eeaf [ 1909.089816] env[62525]: DEBUG oslo_vmware.api [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782306, 'name': PowerOffVM_Task, 'duration_secs': 0.455007} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.090161] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1909.090370] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1909.090683] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5bd8427-ad37-424b-a205-9c227a349333 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.134418] env[62525]: DEBUG oslo_concurrency.lockutils [req-79f9afcd-84e4-4939-b01e-a04f8de6c91f req-2357100b-21a1-4e48-8ca0-b13b2111f12a service nova] Releasing lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.222250] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1909.222477] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1909.222668] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleting the datastore file [datastore1] 130a3015-6caf-4374-a35f-9dd49bb8b3bf {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1909.223025] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25d9cf48-6d8c-4d89-aff4-7e8ea4b38bf1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.229895] env[62525]: DEBUG oslo_vmware.api [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for the task: (returnval){ [ 1909.229895] env[62525]: value = "task-1782308" [ 1909.229895] env[62525]: _type = "Task" [ 1909.229895] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.237780] env[62525]: DEBUG oslo_vmware.api [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782308, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.429327] env[62525]: DEBUG oslo_concurrency.lockutils [None req-d16300f8-ad1a-49cf-8220-a2ee3a094664 tempest-ServerDiskConfigTestJSON-1706963141 tempest-ServerDiskConfigTestJSON-1706963141-project-member] Lock "c008fa1d-7cd6-4f8c-9459-4a47f342eeaf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.710s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.740347] env[62525]: DEBUG oslo_vmware.api [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.990531] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9954597-07c3-47ef-beed-c01b0b590b22 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.998359] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18c3f1e-c524-45a7-8500-12df42d01895 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.028172] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf4085f-fc9a-4ce6-a64b-b65b66464d5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.035720] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83e7910-a82d-4d44-8088-f18e7f5fb477 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.052753] env[62525]: DEBUG nova.compute.provider_tree [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1910.241042] env[62525]: DEBUG oslo_vmware.api [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Task: {'id': task-1782308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.732619} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.241042] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1910.241233] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1910.241418] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1910.241604] env[62525]: INFO nova.compute.manager [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1910.241845] env[62525]: DEBUG oslo.service.loopingcall [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1910.242041] env[62525]: DEBUG nova.compute.manager [-] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1910.242139] env[62525]: DEBUG nova.network.neutron [-] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1910.556064] env[62525]: DEBUG nova.scheduler.client.report [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1910.836863] env[62525]: DEBUG nova.compute.manager [req-ffce00c2-6b6f-4b60-8ca7-aea665d4fe09 req-5ad7a089-e642-4ab1-ad81-ff63c802b7d6 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Received event network-vif-deleted-e889a50a-6d0d-4673-bbd6-be0d2a72bd1c {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1910.837129] env[62525]: INFO nova.compute.manager [req-ffce00c2-6b6f-4b60-8ca7-aea665d4fe09 req-5ad7a089-e642-4ab1-ad81-ff63c802b7d6 service nova] [instance: 
130a3015-6caf-4374-a35f-9dd49bb8b3bf] Neutron deleted interface e889a50a-6d0d-4673-bbd6-be0d2a72bd1c; detaching it from the instance and deleting it from the info cache [ 1910.837316] env[62525]: DEBUG nova.network.neutron [req-ffce00c2-6b6f-4b60-8ca7-aea665d4fe09 req-5ad7a089-e642-4ab1-ad81-ff63c802b7d6 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.061200] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.061592] env[62525]: DEBUG nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1911.309043] env[62525]: DEBUG nova.network.neutron [-] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.341513] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4744d5f1-3aaa-42e0-b6a4-7e6ec047c112 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.351343] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fe13ab-bcea-4564-bee7-cb5e60490851 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.380397] env[62525]: DEBUG nova.compute.manager [req-ffce00c2-6b6f-4b60-8ca7-aea665d4fe09 req-5ad7a089-e642-4ab1-ad81-ff63c802b7d6 service nova] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Detach interface failed, port_id=e889a50a-6d0d-4673-bbd6-be0d2a72bd1c, reason: Instance 130a3015-6caf-4374-a35f-9dd49bb8b3bf could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1911.566701] env[62525]: DEBUG nova.compute.utils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1911.568214] env[62525]: DEBUG nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Allocating IP information in the background. 
{{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1911.568387] env[62525]: DEBUG nova.network.neutron [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1911.611193] env[62525]: DEBUG nova.policy [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83bf79d024f345a9a8c02004f8cefbaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eab7fca262814290a975bf85badc9b71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 1911.812512] env[62525]: INFO nova.compute.manager [-] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Took 1.57 seconds to deallocate network for instance. [ 1911.870402] env[62525]: DEBUG nova.network.neutron [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Successfully created port: 12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1912.071976] env[62525]: DEBUG nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1912.318713] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.318989] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.319316] env[62525]: DEBUG nova.objects.instance [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lazy-loading 'resources' on Instance uuid 130a3015-6caf-4374-a35f-9dd49bb8b3bf {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1912.577684] env[62525]: INFO nova.virt.block_device [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Booting with volume 004ac126-a2b0-4eff-a790-f50f2497a817 at /dev/sda [ 1912.616556] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15ceb6e5-6650-4457-aa4b-8dc6ebaec657 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.626669] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9046f57-8ff6-4a44-bba3-404febc96127 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.656530] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-890f022c-14dd-453f-b4d3-167d462352ad {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.665069] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a2d56f-78d9-41a0-b994-72122dd55213 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.694695] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-524f5a97-64aa-4679-9188-d5e4d0b5078f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.701526] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26028889-0aad-4ea1-a868-2c7b9bbeaaa1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.715390] env[62525]: DEBUG nova.virt.block_device [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating existing volume attachment record: 6608a6a0-27bd-4728-a4d3-c52d3ac46acd {{(pid=62525) _volume_attach 
/opt/stack/nova/nova/virt/block_device.py:666}} [ 1912.935988] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1db55fa-5b4e-4249-9498-015308216690 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.943816] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51efc919-29ca-4b2d-af3e-dcfc1ae15823 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.976282] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c7facd-8eb8-43af-94e8-7a56036dab6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.985250] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae96d25-a2a6-4d01-bc04-67fde17bdecf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.000466] env[62525]: DEBUG nova.compute.provider_tree [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.264234] env[62525]: DEBUG nova.compute.manager [req-149f50e6-b1fb-46c8-bde7-2d874718d173 req-b2d9eb5e-5713-478c-9c3b-4f86d581022e service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Received event network-vif-plugged-12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1913.264420] env[62525]: DEBUG oslo_concurrency.lockutils [req-149f50e6-b1fb-46c8-bde7-2d874718d173 req-b2d9eb5e-5713-478c-9c3b-4f86d581022e service nova] Acquiring lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.264586] env[62525]: DEBUG oslo_concurrency.lockutils [req-149f50e6-b1fb-46c8-bde7-2d874718d173 req-b2d9eb5e-5713-478c-9c3b-4f86d581022e service nova] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.264740] env[62525]: DEBUG oslo_concurrency.lockutils [req-149f50e6-b1fb-46c8-bde7-2d874718d173 req-b2d9eb5e-5713-478c-9c3b-4f86d581022e service nova] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.264907] env[62525]: DEBUG nova.compute.manager [req-149f50e6-b1fb-46c8-bde7-2d874718d173 req-b2d9eb5e-5713-478c-9c3b-4f86d581022e service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] No waiting events found dispatching network-vif-plugged-12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1913.265084] env[62525]: WARNING nova.compute.manager 
[req-149f50e6-b1fb-46c8-bde7-2d874718d173 req-b2d9eb5e-5713-478c-9c3b-4f86d581022e service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Received unexpected event network-vif-plugged-12e7bbdb-87db-4e0a-9d1d-21fc6357160d for instance with vm_state building and task_state block_device_mapping. [ 1913.362367] env[62525]: DEBUG nova.network.neutron [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Successfully updated port: 12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1913.504251] env[62525]: DEBUG nova.scheduler.client.report [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1913.864054] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.864376] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.864376] env[62525]: DEBUG nova.network.neutron [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.009461] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.030122] env[62525]: INFO nova.scheduler.client.report [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Deleted allocations for instance 130a3015-6caf-4374-a35f-9dd49bb8b3bf [ 1914.396755] env[62525]: DEBUG nova.network.neutron [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1914.526722] env[62525]: DEBUG nova.network.neutron [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [{"id": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "address": "fa:16:3e:c4:a0:a5", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12e7bbdb-87", "ovs_interfaceid": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.538083] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6dd05f09-f4ab-43d9-acdb-aad31a554ff4 tempest-ServersTestJSON-1950281889 tempest-ServersTestJSON-1950281889-project-member] Lock "130a3015-6caf-4374-a35f-9dd49bb8b3bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.990s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.803272] env[62525]: DEBUG nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Start spawning the instance on the hypervisor. 
{{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1914.803909] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1914.804204] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1914.804380] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1914.804564] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1914.804708] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1914.804896] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1914.805204] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1914.805405] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1914.805582] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies 
{{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1914.805761] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1914.805959] env[62525]: DEBUG nova.virt.hardware [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1914.806905] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48941eed-0787-4246-9bab-95ac6983859e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.816237] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2813a8-3ba3-4384-b102-e9ee6bb845ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.029302] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.029655] env[62525]: DEBUG nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Instance network_info: |[{"id": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "address": "fa:16:3e:c4:a0:a5", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12e7bbdb-87", "ovs_interfaceid": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1915.030100] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:a0:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12e7bbdb-87db-4e0a-9d1d-21fc6357160d', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1915.037854] env[62525]: DEBUG oslo.service.loopingcall [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1915.038096] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1915.038333] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cfbd946-89b0-429e-98b6-8edf7314e24c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.058321] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1915.058321] env[62525]: value = "task-1782309" [ 1915.058321] env[62525]: _type = "Task" [ 1915.058321] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.066239] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782309, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.301734] env[62525]: DEBUG nova.compute.manager [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Received event network-changed-12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1915.302014] env[62525]: DEBUG nova.compute.manager [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Refreshing instance network info cache due to event network-changed-12e7bbdb-87db-4e0a-9d1d-21fc6357160d. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1915.302279] env[62525]: DEBUG oslo_concurrency.lockutils [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] Acquiring lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.302463] env[62525]: DEBUG oslo_concurrency.lockutils [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] Acquired lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.302650] env[62525]: DEBUG nova.network.neutron [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Refreshing network info cache for port 12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1915.373127] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.373375] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.568597] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782309, 'name': CreateVM_Task, 'duration_secs': 0.360786} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.568721] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1915.569602] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369869', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'name': 'volume-004ac126-a2b0-4eff-a790-f50f2497a817', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed5234ba-d26b-47da-8c9b-4cc591baf087', 'attached_at': '', 'detached_at': '', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'serial': '004ac126-a2b0-4eff-a790-f50f2497a817'}, 'device_type': None, 'mount_device': '/dev/sda', 'attachment_id': '6608a6a0-27bd-4728-a4d3-c52d3ac46acd', 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62525) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1915.569819] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Root volume attach. Driver type: vmdk {{(pid=62525) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1915.570612] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ac3396-5e5b-437e-b209-0a2d171bd464 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.578475] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d4ca9d-17e5-477b-8752-1237394c38f6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.584678] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829d28a1-fe41-4605-9813-cc51f0b8a18b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.590457] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-a80a781f-f4b4-4b4b-9008-f4c28fee5f8c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.597247] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1915.597247] env[62525]: value = "task-1782310" [ 1915.597247] env[62525]: _type = "Task" [ 1915.597247] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.604745] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782310, 'name': RelocateVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.876337] env[62525]: DEBUG nova.compute.utils [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1916.078363] env[62525]: DEBUG nova.network.neutron [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updated VIF entry in instance network info cache for port 12e7bbdb-87db-4e0a-9d1d-21fc6357160d. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1916.078748] env[62525]: DEBUG nova.network.neutron [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [{"id": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "address": "fa:16:3e:c4:a0:a5", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12e7bbdb-87", "ovs_interfaceid": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.109064] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782310, 'name': RelocateVM_Task, 'duration_secs': 0.509781} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.109064] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Volume attach. 
Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1916.109064] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369869', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'name': 'volume-004ac126-a2b0-4eff-a790-f50f2497a817', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed5234ba-d26b-47da-8c9b-4cc591baf087', 'attached_at': '', 'detached_at': '', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'serial': '004ac126-a2b0-4eff-a790-f50f2497a817'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1916.110691] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07a430e-fe1a-402a-95e0-cc0bf1054d4d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.126066] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b83c592-191a-4c91-af66-27b52059d7e1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.148138] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-004ac126-a2b0-4eff-a790-f50f2497a817/volume-004ac126-a2b0-4eff-a790-f50f2497a817.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1916.148451] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a861330-9353-4533-be85-3faffb1f9f3e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.169951] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1916.169951] env[62525]: value = "task-1782311" [ 1916.169951] env[62525]: _type = "Task" [ 1916.169951] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.179918] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782311, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.379477] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.583289] env[62525]: DEBUG oslo_concurrency.lockutils [req-7fd8b958-51d3-4466-b972-29ce3f9a970f req-f6ac5f1b-1fbe-401f-8a1f-a03eae39a9d0 service nova] Releasing lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.681730] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782311, 'name': ReconfigVM_Task, 'duration_secs': 0.280417} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.682090] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-004ac126-a2b0-4eff-a790-f50f2497a817/volume-004ac126-a2b0-4eff-a790-f50f2497a817.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1916.688408] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11f84706-2508-4b98-8612-8eccf9173a0f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.705581] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1916.705581] env[62525]: value = "task-1782312" [ 1916.705581] env[62525]: _type = "Task" [ 1916.705581] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.715484] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.215957] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782312, 'name': ReconfigVM_Task, 'duration_secs': 0.143743} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.216381] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369869', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'name': 'volume-004ac126-a2b0-4eff-a790-f50f2497a817', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed5234ba-d26b-47da-8c9b-4cc591baf087', 'attached_at': '', 'detached_at': '', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'serial': '004ac126-a2b0-4eff-a790-f50f2497a817'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1917.216816] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f16cc6af-2215-4acb-bd9b-3641c16bf78a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.222885] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1917.222885] env[62525]: value = "task-1782313" [ 1917.222885] env[62525]: _type = "Task" [ 1917.222885] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.231426] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782313, 'name': Rename_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.448761] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.449054] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.449379] env[62525]: INFO nova.compute.manager [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Attaching volume c2dbdbf8-3a7a-438d-830e-b8467d119270 to /dev/sdb [ 1917.481036] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5134e64c-74a4-4dde-b355-d3414d0c2bef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.487199] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e77537-0751-42f3-9741-8b339b8feb3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.501170] env[62525]: DEBUG nova.virt.block_device [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updating existing volume attachment record: cfe74213-ed82-4e50-9bcc-d1ecf710cf8d {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1917.733192] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782313, 'name': Rename_Task, 'duration_secs': 0.125076} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.733539] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1917.733795] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffa1b9ec-d56b-4274-83b4-b7c501ad2cb3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.739948] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1917.739948] env[62525]: value = "task-1782315" [ 1917.739948] env[62525]: _type = "Task" [ 1917.739948] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.749183] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.250796] env[62525]: DEBUG oslo_vmware.api [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782315, 'name': PowerOnVM_Task, 'duration_secs': 0.444098} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.251195] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1918.251243] env[62525]: INFO nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Took 3.45 seconds to spawn the instance on the hypervisor. [ 1918.251416] env[62525]: DEBUG nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1918.252208] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5286c9d4-3b79-432e-86b7-88375538d377 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.768562] env[62525]: INFO nova.compute.manager [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Took 11.36 seconds to build instance. 
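The RelocateVM_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same invoke-then-poll pattern: the driver invokes a vSphere *_Task method through the oslo.vmware session, and wait_for_task() then polls it (the repeated "progress is N%." lines) until it reports success or error. A minimal sketch of that pattern using oslo.vmware's public session API follows; `session` and `vm_ref` are illustrative names assumed to exist (e.g. the VMwareAPISession the VMwareVCDriver created at startup and a VM managed-object reference), not values taken from this log, and this is not Nova's exact internal helper code.

    from oslo_vmware import api  # oslo.vmware, the library producing the api.py log lines above

    def power_on(session: api.VMwareAPISession, vm_ref):
        # Invoke the vSphere task; this corresponds to the
        # "Invoking VirtualMachine.PowerOnVM_Task" request_handler entries.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() is what emits the "progress is N%." / "completed
        # successfully" entries: it polls the task until it finishes and
        # returns the final task info, raising if the task ends in error.
        return session.wait_for_task(task_ref)
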
[ 1919.270855] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8e8d9254-5897-4220-a673-1f8aba30ffff tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.869s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.687492] env[62525]: DEBUG nova.compute.manager [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Received event network-changed-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1919.687691] env[62525]: DEBUG nova.compute.manager [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Refreshing instance network info cache due to event network-changed-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1919.688683] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] Acquiring lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.688683] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] Acquired lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.688683] env[62525]: DEBUG nova.network.neutron [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Refreshing network info cache for port 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1920.383912] env[62525]: DEBUG nova.network.neutron [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updated VIF entry in instance network info cache for port 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1920.384307] env[62525]: DEBUG nova.network.neutron [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [{"id": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "address": "fa:16:3e:43:49:a1", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d67f7f-d8", "ovs_interfaceid": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.887060] env[62525]: DEBUG oslo_concurrency.lockutils [req-8b248e73-ea83-49f6-a0fe-da42c539208a req-0aa0882f-fd54-4aee-aa28-68c918c32a37 service nova] Releasing lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.113509] env[62525]: DEBUG nova.compute.manager [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1921.634012] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.634395] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.713962] env[62525]: DEBUG nova.compute.manager [req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Received event network-changed-12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1921.714262] env[62525]: DEBUG nova.compute.manager 
[req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Refreshing instance network info cache due to event network-changed-12e7bbdb-87db-4e0a-9d1d-21fc6357160d. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1921.714486] env[62525]: DEBUG oslo_concurrency.lockutils [req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] Acquiring lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.714631] env[62525]: DEBUG oslo_concurrency.lockutils [req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] Acquired lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.714789] env[62525]: DEBUG nova.network.neutron [req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Refreshing network info cache for port 12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1922.044281] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Volume attach. Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1922.044650] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369872', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'name': 'volume-c2dbdbf8-3a7a-438d-830e-b8467d119270', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fd078815-58e6-4a3a-9da8-dd5324ea76b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'serial': 'c2dbdbf8-3a7a-438d-830e-b8467d119270'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1922.045913] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba00cca-0d27-4bac-9dee-a423b916c826 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.069460] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c2e910-5d50-4d01-a96a-5ac5a503bd55 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.106470] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 
volume-c2dbdbf8-3a7a-438d-830e-b8467d119270/volume-c2dbdbf8-3a7a-438d-830e-b8467d119270.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1922.106786] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0961121-0957-4acc-b999-7c3ac2deb6d8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.132178] env[62525]: DEBUG oslo_vmware.api [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1922.132178] env[62525]: value = "task-1782317" [ 1922.132178] env[62525]: _type = "Task" [ 1922.132178] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.138758] env[62525]: INFO nova.compute.claims [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1922.145317] env[62525]: DEBUG oslo_vmware.api [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782317, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.430912] env[62525]: DEBUG nova.network.neutron [req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updated VIF entry in instance network info cache for port 12e7bbdb-87db-4e0a-9d1d-21fc6357160d. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1922.431343] env[62525]: DEBUG nova.network.neutron [req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [{"id": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "address": "fa:16:3e:c4:a0:a5", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12e7bbdb-87", "ovs_interfaceid": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.641584] env[62525]: DEBUG oslo_vmware.api [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782317, 'name': ReconfigVM_Task, 'duration_secs': 0.329576} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.641924] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-c2dbdbf8-3a7a-438d-830e-b8467d119270/volume-c2dbdbf8-3a7a-438d-830e-b8467d119270.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1922.647771] env[62525]: INFO nova.compute.resource_tracker [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating resource usage from migration 74a483c9-afa4-41c5-b915-75bcb937a9c5 [ 1922.649983] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5cc5e39-9f52-4c49-9253-b2ba5ee60ac8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.664727] env[62525]: DEBUG oslo_vmware.api [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1922.664727] env[62525]: value = "task-1782318" [ 1922.664727] env[62525]: _type = "Task" [ 1922.664727] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.672742] env[62525]: DEBUG oslo_vmware.api [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782318, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.757136] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897ca84e-91e8-485b-9839-b9bf06e2cb52 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.764701] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6146412c-8bc0-47b8-88a4-15b61b8cae72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.795816] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d0d954-5ee2-496f-aff5-4d9c5ee6e0a6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.804809] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd97dd3-5f79-45d9-b00d-8b9f1e370187 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.823512] env[62525]: DEBUG nova.compute.provider_tree [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1922.934152] env[62525]: DEBUG oslo_concurrency.lockutils [req-fff516d6-a8f0-47c8-b2eb-7f7e36a80635 req-49688182-2299-44bf-b42a-92dd964cd5ec service nova] Releasing lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.175159] env[62525]: DEBUG oslo_vmware.api [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782318, 'name': ReconfigVM_Task, 'duration_secs': 0.135722} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.175468] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369872', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'name': 'volume-c2dbdbf8-3a7a-438d-830e-b8467d119270', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fd078815-58e6-4a3a-9da8-dd5324ea76b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'serial': 'c2dbdbf8-3a7a-438d-830e-b8467d119270'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1923.327288] env[62525]: DEBUG nova.scheduler.client.report [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1923.832238] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.198s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.832583] env[62525]: INFO nova.compute.manager [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Migrating [ 1924.209906] env[62525]: DEBUG nova.objects.instance [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'flavor' on Instance uuid fd078815-58e6-4a3a-9da8-dd5324ea76b8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1924.346913] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.347115] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.347299] env[62525]: 
DEBUG nova.network.neutron [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1924.715363] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9d6c514d-b3d3-471d-8b08-594bca9c8a85 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.266s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.923804] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.924113] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.066335] env[62525]: DEBUG nova.network.neutron [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [{"id": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "address": "fa:16:3e:c4:a0:a5", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12e7bbdb-87", "ovs_interfaceid": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.427712] env[62525]: INFO nova.compute.manager [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 
tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Detaching volume c2dbdbf8-3a7a-438d-830e-b8467d119270 [ 1925.457177] env[62525]: INFO nova.virt.block_device [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Attempting to driver detach volume c2dbdbf8-3a7a-438d-830e-b8467d119270 from mountpoint /dev/sdb [ 1925.457422] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Volume detach. Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1925.457613] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369872', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'name': 'volume-c2dbdbf8-3a7a-438d-830e-b8467d119270', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fd078815-58e6-4a3a-9da8-dd5324ea76b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'serial': 'c2dbdbf8-3a7a-438d-830e-b8467d119270'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1925.458568] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083f2c31-8655-4b80-8a14-d7d6e78f1447 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.480801] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbd807c-90cf-431b-affa-4f011ae9c5e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.487871] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600f73a3-e921-4ca2-8460-6eaa82d86e58 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.508885] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4607fe-a1f3-4202-8f8e-5214e00fc9e9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.524059] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] The volume has not been displaced from its original location: [datastore1] volume-c2dbdbf8-3a7a-438d-830e-b8467d119270/volume-c2dbdbf8-3a7a-438d-830e-b8467d119270.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1925.529175] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1925.529480] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b50b541-7e30-497b-9b06-1913c6b6bf01 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.547205] env[62525]: DEBUG oslo_vmware.api [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1925.547205] env[62525]: value = "task-1782319" [ 1925.547205] env[62525]: _type = "Task" [ 1925.547205] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.554998] env[62525]: DEBUG oslo_vmware.api [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.568617] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.059235] env[62525]: DEBUG oslo_vmware.api [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782319, 'name': ReconfigVM_Task, 'duration_secs': 0.254024} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.059634] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1926.064219] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21d7de39-4e57-4c38-a5b8-65c0509364f4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.083182] env[62525]: DEBUG oslo_vmware.api [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1926.083182] env[62525]: value = "task-1782320" [ 1926.083182] env[62525]: _type = "Task" [ 1926.083182] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.091671] env[62525]: DEBUG oslo_vmware.api [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782320, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.594582] env[62525]: DEBUG oslo_vmware.api [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782320, 'name': ReconfigVM_Task, 'duration_secs': 0.140258} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.595602] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369872', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'name': 'volume-c2dbdbf8-3a7a-438d-830e-b8467d119270', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fd078815-58e6-4a3a-9da8-dd5324ea76b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'c2dbdbf8-3a7a-438d-830e-b8467d119270', 'serial': 'c2dbdbf8-3a7a-438d-830e-b8467d119270'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1927.087884] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd209929-86d1-4e9f-93ce-a3bac2c6236c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.106027] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance 'ed5234ba-d26b-47da-8c9b-4cc591baf087' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1927.139075] env[62525]: DEBUG nova.objects.instance [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'flavor' on Instance uuid fd078815-58e6-4a3a-9da8-dd5324ea76b8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1927.611918] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1927.612273] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a61ae253-f329-461d-9146-ef47dd9c00fa {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.619609] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 
tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1927.619609] env[62525]: value = "task-1782321" [ 1927.619609] env[62525]: _type = "Task" [ 1927.619609] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.627681] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782321, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.130453] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782321, 'name': PowerOffVM_Task, 'duration_secs': 0.146153} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.130877] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1928.130932] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance 'ed5234ba-d26b-47da-8c9b-4cc591baf087' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1928.144942] env[62525]: DEBUG oslo_concurrency.lockutils [None req-a104f2aa-5e2c-481f-972f-99ba905d82d6 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.637034] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1928.637225] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1928.637386] env[62525]: 
DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1928.637574] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1928.637725] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1928.637874] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1928.638091] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1928.638256] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1928.638419] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1928.638583] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1928.638757] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1928.643994] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05bb267b-3b91-40ed-b398-dc71617b718a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.663403] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 
1928.663403] env[62525]: value = "task-1782322" [ 1928.663403] env[62525]: _type = "Task" [ 1928.663403] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.673783] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782322, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.164993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.165337] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.165452] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.165637] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.165810] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.170732] env[62525]: INFO nova.compute.manager [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Terminating instance [ 1929.172707] env[62525]: DEBUG nova.compute.manager [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1929.172903] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1929.173696] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b11174a-b516-47dc-afde-cc323cc8d64e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.179275] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782322, 'name': ReconfigVM_Task, 'duration_secs': 0.156079} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.179874] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance 'ed5234ba-d26b-47da-8c9b-4cc591baf087' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1929.185181] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1929.185571] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d39f1296-cb6d-45df-8cf6-c03c1722efd4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.191639] env[62525]: DEBUG oslo_vmware.api [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1929.191639] env[62525]: value = "task-1782323" [ 1929.191639] env[62525]: _type = "Task" [ 1929.191639] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.200256] env[62525]: DEBUG oslo_vmware.api [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782323, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.688309] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1929.688573] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1929.688740] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1929.689028] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1929.689192] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1929.689345] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1929.689547] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1929.689777] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1929.689977] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Got 1 possible topologies 
{{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1929.690166] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1929.690342] env[62525]: DEBUG nova.virt.hardware [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1929.695731] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1929.696066] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34ef593c-a3d5-4734-b5de-3f3b6102d94b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.717608] env[62525]: DEBUG oslo_vmware.api [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782323, 'name': PowerOffVM_Task, 'duration_secs': 0.200079} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.718790] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1929.718969] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1929.719285] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1929.719285] env[62525]: value = "task-1782324" [ 1929.719285] env[62525]: _type = "Task" [ 1929.719285] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.719478] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe8d0e51-19f7-493f-8119-d36c2d670605 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.729282] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782324, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.915921] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1929.916174] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1929.916363] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleting the datastore file [datastore1] fd078815-58e6-4a3a-9da8-dd5324ea76b8 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1929.916641] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0438557-7482-455a-93b3-7aa0acdfe090 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.922808] env[62525]: DEBUG oslo_vmware.api [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for the task: (returnval){ [ 1929.922808] env[62525]: value = "task-1782326" [ 1929.922808] env[62525]: _type = "Task" [ 1929.922808] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.930448] env[62525]: DEBUG oslo_vmware.api [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.230159] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782324, 'name': ReconfigVM_Task, 'duration_secs': 0.169454} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.230509] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1930.231225] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a08fc8-b1e6-44b5-b7cd-b5c1dfd06642 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.252966] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-004ac126-a2b0-4eff-a790-f50f2497a817/volume-004ac126-a2b0-4eff-a790-f50f2497a817.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1930.253581] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6c447dc-c57a-420e-a761-4823bac04b61 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.271482] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1930.271482] env[62525]: value = "task-1782327" [ 1930.271482] env[62525]: _type = "Task" [ 1930.271482] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.279221] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782327, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.432934] env[62525]: DEBUG oslo_vmware.api [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Task: {'id': task-1782326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152817} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.433175] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1930.433397] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1930.433627] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1930.433788] env[62525]: INFO nova.compute.manager [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1930.434036] env[62525]: DEBUG oslo.service.loopingcall [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1930.434232] env[62525]: DEBUG nova.compute.manager [-] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1930.434327] env[62525]: DEBUG nova.network.neutron [-] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1930.785488] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782327, 'name': ReconfigVM_Task, 'duration_secs': 0.253624} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.785671] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-004ac126-a2b0-4eff-a790-f50f2497a817/volume-004ac126-a2b0-4eff-a790-f50f2497a817.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1930.786086] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance 'ed5234ba-d26b-47da-8c9b-4cc591baf087' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1930.913612] env[62525]: DEBUG nova.compute.manager [req-3d43f7e6-8575-49eb-ab93-c96ae97e4c66 req-70cd1cc0-cb4b-4009-9b0a-b5838d242776 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Received event network-vif-deleted-4f9698c8-b319-4565-b308-ac2450ee865f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1930.913836] env[62525]: INFO nova.compute.manager [req-3d43f7e6-8575-49eb-ab93-c96ae97e4c66 req-70cd1cc0-cb4b-4009-9b0a-b5838d242776 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Neutron deleted interface 4f9698c8-b319-4565-b308-ac2450ee865f; detaching it from the instance and deleting it from the info cache [ 1930.914023] env[62525]: DEBUG nova.network.neutron [req-3d43f7e6-8575-49eb-ab93-c96ae97e4c66 req-70cd1cc0-cb4b-4009-9b0a-b5838d242776 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.293098] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eaadf8-8761-4633-8af9-eab6b9749fda {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.312645] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b2ef80-9bff-42f8-ab38-371c0d7a11ff {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.330370] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance 'ed5234ba-d26b-47da-8c9b-4cc591baf087' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1931.390639] env[62525]: DEBUG nova.network.neutron [-] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.417285] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1a6a6ba-40e5-4b6f-90dd-0ed851576a71 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.426877] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a17884-2b8e-4882-b04b-6df224a30a3b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.456692] env[62525]: DEBUG nova.compute.manager [req-3d43f7e6-8575-49eb-ab93-c96ae97e4c66 req-70cd1cc0-cb4b-4009-9b0a-b5838d242776 service nova] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Detach interface failed, port_id=4f9698c8-b319-4565-b308-ac2450ee865f, reason: Instance fd078815-58e6-4a3a-9da8-dd5324ea76b8 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1931.892866] env[62525]: INFO nova.compute.manager [-] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Took 1.46 seconds to deallocate network for instance. [ 1932.398894] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.399282] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.399380] env[62525]: DEBUG nova.objects.instance [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lazy-loading 'resources' on Instance uuid fd078815-58e6-4a3a-9da8-dd5324ea76b8 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1932.966466] env[62525]: DEBUG nova.network.neutron [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Port 12e7bbdb-87db-4e0a-9d1d-21fc6357160d binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1933.008532] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ef57db-e338-4983-9360-00defa0e6a76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.016087] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e0b026-50e2-416f-9516-2573e0baac6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.046028] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5e8fbb-4c95-4730-86f1-b017dc1b4d6c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.052961] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-48753a9f-b6dc-43df-bdd7-5baaf6fc2309 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.065825] env[62525]: DEBUG nova.compute.provider_tree [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.568855] env[62525]: DEBUG nova.scheduler.client.report [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1933.990302] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.991048] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.991048] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.073447] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.091637] env[62525]: INFO nova.scheduler.client.report [None req-3e9de51e-0995-4f14-97fd-df811187c476 tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Deleted allocations for instance fd078815-58e6-4a3a-9da8-dd5324ea76b8 [ 1934.598581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3e9de51e-0995-4f14-97fd-df811187c476 
tempest-AttachVolumeNegativeTest-1425949199 tempest-AttachVolumeNegativeTest-1425949199-project-member] Lock "fd078815-58e6-4a3a-9da8-dd5324ea76b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.433s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.027092] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.027288] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.027463] env[62525]: DEBUG nova.network.neutron [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1935.734416] env[62525]: DEBUG nova.network.neutron [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [{"id": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "address": "fa:16:3e:c4:a0:a5", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12e7bbdb-87", "ovs_interfaceid": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.237142] env[62525]: DEBUG oslo_concurrency.lockutils [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.746749] 
env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac9a565-9cf1-49dd-b7ed-2737a87d8bd0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.753916] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172927f4-4861-4db5-863b-cdf21519b86e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.845423] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8c53bf-5847-4c3a-9f56-c2e74aa1c09b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.866995] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f3da77-5acb-40e9-bbda-a5a13eefc9c5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.874236] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance 'ed5234ba-d26b-47da-8c9b-4cc591baf087' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1938.380868] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1938.381213] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8803eb7b-4b04-44fe-a8ce-0472ea0b9edf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.388941] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1938.388941] env[62525]: value = "task-1782329" [ 1938.388941] env[62525]: _type = "Task" [ 1938.388941] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.396641] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782329, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.898540] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782329, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.398897] env[62525]: DEBUG oslo_vmware.api [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782329, 'name': PowerOnVM_Task, 'duration_secs': 0.63196} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.399174] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1939.399400] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-454bcdb6-c9a2-4ece-a4a8-59cbd71807ca tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance 'ed5234ba-d26b-47da-8c9b-4cc591baf087' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1941.600352] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.600709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.600814] env[62525]: DEBUG nova.compute.manager [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Going to confirm migration 8 {{(pid=62525) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1942.181815] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.182014] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquired lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.182270] env[62525]: DEBUG nova.network.neutron [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 
ed5234ba-d26b-47da-8c9b-4cc591baf087] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1942.182481] env[62525]: DEBUG nova.objects.instance [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'info_cache' on Instance uuid ed5234ba-d26b-47da-8c9b-4cc591baf087 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1942.678670] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "abd538d5-f433-4896-9871-5cdef303cda0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.679060] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "abd538d5-f433-4896-9871-5cdef303cda0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.679060] env[62525]: DEBUG nova.compute.manager [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1942.680020] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec019dd-a335-4c92-9db2-937533cf41dc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.688944] env[62525]: DEBUG nova.compute.manager [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1942.689504] env[62525]: DEBUG nova.objects.instance [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'flavor' on Instance uuid abd538d5-f433-4896-9871-5cdef303cda0 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1942.701409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.701619] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" 
acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.701783] env[62525]: INFO nova.compute.manager [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Shelving [ 1943.195935] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1943.196217] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-985855d7-0eb9-4088-9072-0ebd5f84d921 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.205625] env[62525]: DEBUG oslo_vmware.api [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1943.205625] env[62525]: value = "task-1782332" [ 1943.205625] env[62525]: _type = "Task" [ 1943.205625] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.211299] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1943.211299] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c0b1b6e-7554-47b6-a4cb-ebd8c053dde9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.218317] env[62525]: DEBUG oslo_vmware.api [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.220141] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1943.220141] env[62525]: value = "task-1782333" [ 1943.220141] env[62525]: _type = "Task" [ 1943.220141] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.232604] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782333, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.439090] env[62525]: DEBUG nova.network.neutron [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [{"id": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "address": "fa:16:3e:c4:a0:a5", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12e7bbdb-87", "ovs_interfaceid": "12e7bbdb-87db-4e0a-9d1d-21fc6357160d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.563819] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.564116] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.717369] env[62525]: DEBUG oslo_vmware.api [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782332, 'name': PowerOffVM_Task, 'duration_secs': 0.188445} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.717729] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1943.717729] env[62525]: DEBUG nova.compute.manager [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1943.718498] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546e65c3-a776-48f0-ae97-cce4a03502a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.730904] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782333, 'name': PowerOffVM_Task, 'duration_secs': 0.179462} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.731358] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1943.732076] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d139e7-e340-45ff-a81b-70acf1ecb3b4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.751106] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480821f0-34d0-4290-9192-86fb48206f9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.941881] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Releasing lock "refresh_cache-ed5234ba-d26b-47da-8c9b-4cc591baf087" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.942211] env[62525]: DEBUG nova.objects.instance [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'migration_context' on Instance uuid ed5234ba-d26b-47da-8c9b-4cc591baf087 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1944.070039] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1944.070203] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None 
None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1944.070245] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1944.103547] env[62525]: DEBUG nova.compute.manager [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1944.104819] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a82927a-df37-40af-a50c-d32804f9fec6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.234504] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9f6d7da0-e203-47b5-a04d-193dac61ddcd tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "abd538d5-f433-4896-9871-5cdef303cda0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.555s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.261222] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1944.261485] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-528ae709-082c-4512-b692-1b55c6e46e19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.270541] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1944.270541] env[62525]: value = "task-1782334" [ 1944.270541] env[62525]: _type = "Task" [ 1944.270541] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.279064] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782334, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.444821] env[62525]: DEBUG nova.objects.base [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1944.445829] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590ea1ee-378a-4074-acd5-8406f54a6880 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.464942] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b6596b4-41e9-40bc-98a4-22d521e70515 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.470635] env[62525]: DEBUG oslo_vmware.api [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1944.470635] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52cde399-217b-a813-b8d0-c9447e8719fd" [ 1944.470635] env[62525]: _type = "Task" [ 1944.470635] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.478744] env[62525]: DEBUG oslo_vmware.api [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cde399-217b-a813-b8d0-c9447e8719fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.604770] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.604983] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1944.605144] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1944.605305] env[62525]: DEBUG nova.objects.instance [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lazy-loading 'info_cache' on Instance uuid 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1944.615144] env[62525]: INFO nova.compute.manager [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] instance snapshotting [ 1944.615743] env[62525]: DEBUG nova.objects.instance [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'flavor' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1944.782934] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782334, 'name': CreateSnapshot_Task, 'duration_secs': 0.40813} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.783235] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1944.783759] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd52c2f-4502-4618-a310-50bf317ad8c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.980670] env[62525]: DEBUG oslo_vmware.api [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52cde399-217b-a813-b8d0-c9447e8719fd, 'name': SearchDatastore_Task, 'duration_secs': 0.007968} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.980933] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.981144] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.122653] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a8dfca-b48b-4b57-8c89-cba6d121aaa2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.146518] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317d9442-ec54-4a37-8a96-7949b431660b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.170199] env[62525]: DEBUG nova.objects.instance [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'flavor' on Instance uuid abd538d5-f433-4896-9871-5cdef303cda0 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1945.302724] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1945.303048] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3e35c45b-321a-4a99-a853-77ce91e3b80f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.312320] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1945.312320] env[62525]: value = "task-1782335" [ 1945.312320] env[62525]: _type = "Task" [ 1945.312320] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.320609] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782335, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.588304] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adab5b95-8051-4a1a-bea2-5b4dd01623ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.595857] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4225cff9-26fd-4078-8b52-6640e5fc131f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.632153] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f801ea-167b-4d8f-8e05-0c8176ad8a64 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.639902] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9050c4c5-30a2-41a5-b6cc-9a12105373b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.653830] env[62525]: DEBUG nova.compute.provider_tree [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1945.656602] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1945.656884] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c6370b4c-3adc-4178-ab5b-b3625cf37cdd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.665757] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1945.665757] env[62525]: value = "task-1782336" [ 1945.665757] env[62525]: _type = "Task" [ 1945.665757] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.677927] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782336, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.678537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.678654] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.678826] env[62525]: DEBUG nova.network.neutron [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1945.679010] env[62525]: DEBUG nova.objects.instance [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'info_cache' on Instance uuid abd538d5-f433-4896-9871-5cdef303cda0 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1945.823128] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782335, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.157877] env[62525]: DEBUG nova.scheduler.client.report [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1946.176400] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782336, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.182182] env[62525]: DEBUG nova.objects.base [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1946.323361] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782335, 'name': CloneVM_Task} progress is 95%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.392439] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [{"id": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "address": "fa:16:3e:43:49:a1", "network": {"id": "69959178-a900-4fe0-a8f1-287aadcdf430", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1469671341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab7fca262814290a975bf85badc9b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d67f7f-d8", "ovs_interfaceid": "52d67f7f-d861-4c0b-bfa2-c2f41085fb6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.678458] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782336, 'name': CreateSnapshot_Task, 'duration_secs': 0.606252} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.678791] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1946.679896] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae69baff-99cb-4a1f-bb96-da8ab62bbe77 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.823341] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782335, 'name': CloneVM_Task, 'duration_secs': 1.223709} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.823713] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Created linked-clone VM from snapshot [ 1946.824493] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdd0f63-6a8a-4d3a-97f3-5614b0f97023 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.832537] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Uploading image 10429895-8278-4c9a-a80c-8a0d17d7ac47 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1946.857064] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1946.857064] env[62525]: value = "vm-369874" [ 1946.857064] env[62525]: _type = "VirtualMachine" [ 1946.857064] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1946.857513] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8bdce2cc-5c5c-4c32-9697-7455d1766136 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.866750] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease: (returnval){ [ 1946.866750] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5223c7d6-cdc0-5185-abba-37eb67441a50" [ 1946.866750] env[62525]: _type = "HttpNfcLease" [ 1946.866750] env[62525]: } obtained for exporting VM: (result){ [ 1946.866750] env[62525]: value = "vm-369874" [ 1946.866750] env[62525]: _type = "VirtualMachine" [ 1946.866750] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1946.867189] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the lease: (returnval){ [ 1946.867189] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5223c7d6-cdc0-5185-abba-37eb67441a50" [ 1946.867189] env[62525]: _type = "HttpNfcLease" [ 1946.867189] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1946.877151] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1946.877151] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5223c7d6-cdc0-5185-abba-37eb67441a50" [ 1946.877151] env[62525]: _type = "HttpNfcLease" [ 1946.877151] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1946.896151] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.896475] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1946.896803] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.897081] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.897336] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.897586] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.897825] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.898084] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.898306] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1946.898563] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.969927] env[62525]: DEBUG nova.network.neutron [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updating instance_info_cache with network_info: [{"id": "6d956a60-0763-439d-9f38-a05ab94cca9f", "address": "fa:16:3e:fe:0f:d4", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d956a60-07", "ovs_interfaceid": "6d956a60-0763-439d-9f38-a05ab94cca9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.173229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.192s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.203661] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1947.203993] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4b85fafc-098b-4328-a067-4530d564debb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.213016] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1947.213016] env[62525]: value = "task-1782338" [ 1947.213016] env[62525]: _type = "Task" [ 1947.213016] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.221707] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782338, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.375351] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1947.375351] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5223c7d6-cdc0-5185-abba-37eb67441a50" [ 1947.375351] env[62525]: _type = "HttpNfcLease" [ 1947.375351] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1947.375635] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1947.375635] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5223c7d6-cdc0-5185-abba-37eb67441a50" [ 1947.375635] env[62525]: _type = "HttpNfcLease" [ 1947.375635] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1947.376283] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fbf78c-2c46-46fb-bd18-e8b05510c836 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.383612] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52157aa2-3b5c-2fcc-4725-9874ea35fe90/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1947.383789] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52157aa2-3b5c-2fcc-4725-9874ea35fe90/disk-0.vmdk for reading. 
{{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1947.441734] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.441966] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.442164] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.442309] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1947.443205] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88e5277-4e00-4aa2-ae38-0138d793d7b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.452638] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d33c21-2c2b-4858-8132-792df9f21e29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.468634] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae634ccd-78d7-4f71-aaad-07ebdf9369cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.472195] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.476514] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06f7b79-7377-4528-92c7-10807bd55c73 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.509483] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180588MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1947.509660] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.509831] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.514408] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e78f3e9b-421f-4234-8321-136c7acec0b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.724947] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782338, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.738434] env[62525]: INFO nova.scheduler.client.report [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted allocation for migration 74a483c9-afa4-41c5-b915-75bcb937a9c5 [ 1947.977825] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1947.978251] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-500d2431-fe9c-4023-9bf6-ec9dbc482b16 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.985354] env[62525]: DEBUG oslo_vmware.api [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1947.985354] env[62525]: value = "task-1782339" [ 1947.985354] env[62525]: _type = "Task" [ 1947.985354] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.995116] env[62525]: DEBUG oslo_vmware.api [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782339, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.225427] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782338, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.245237] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cb0375e4-7d05-411f-8e8d-400908074037 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.644s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.284797] env[62525]: INFO nova.compute.manager [None req-6bd397cd-95ca-466e-9d95-975d3cec1ce5 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Get console output [ 1948.285177] env[62525]: WARNING nova.virt.vmwareapi.driver [None req-6bd397cd-95ca-466e-9d95-975d3cec1ce5 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] The console log is missing. Check your VSPC configuration [ 1948.498363] env[62525]: DEBUG oslo_vmware.api [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782339, 'name': PowerOnVM_Task, 'duration_secs': 0.420857} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.498785] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1948.499084] env[62525]: DEBUG nova.compute.manager [None req-f86471bd-53e3-4c3e-a6cb-7b8870713e5d tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1948.500159] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb92a69-277f-4be3-9ff9-3d96047a9720 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.542680] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1948.544185] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1948.544185] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1948.544185] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 870d7795-49ca-4201-983a-a85b590e805e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1948.544185] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance abd538d5-f433-4896-9871-5cdef303cda0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1948.544185] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance ed5234ba-d26b-47da-8c9b-4cc591baf087 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1948.544185] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1948.544185] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1948.633748] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ad9c5c-52d5-4975-875f-7c7a60b02b10 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.641678] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8fba55-78cb-484e-a24a-411ce3189135 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.677124] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c32e951-2cac-4972-bdce-0b745912fc84 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.685786] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d4180a-83fc-4ca5-85bc-b6a930af9208 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.700145] env[62525]: DEBUG nova.compute.provider_tree [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1948.725056] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782338, 'name': CloneVM_Task} progress is 95%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.203507] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1949.226858] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782338, 'name': CloneVM_Task, 'duration_secs': 1.730416} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.227224] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Created linked-clone VM from snapshot [ 1949.228152] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abddff33-ab8a-4e46-ae19-2b325363abb2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.235952] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Uploading image 4f6edf16-9b31-4c97-b7c6-52b755e6445a {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1949.259440] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1949.259440] env[62525]: value = "vm-369876" [ 1949.259440] env[62525]: _type = "VirtualMachine" [ 1949.259440] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1949.259834] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-12c0d2fb-d645-4500-b887-8806118cae95 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.266948] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease: (returnval){ [ 1949.266948] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d8c4f5-5364-94b0-cc4a-fcb739e0c388" [ 1949.266948] env[62525]: _type = "HttpNfcLease" [ 1949.266948] env[62525]: } obtained for exporting VM: (result){ [ 1949.266948] env[62525]: value = "vm-369876" [ 1949.266948] env[62525]: _type = "VirtualMachine" [ 1949.266948] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1949.267566] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the lease: (returnval){ [ 1949.267566] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d8c4f5-5364-94b0-cc4a-fcb739e0c388" [ 1949.267566] env[62525]: _type = "HttpNfcLease" [ 1949.267566] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1949.273840] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1949.273840] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d8c4f5-5364-94b0-cc4a-fcb739e0c388" [ 1949.273840] env[62525]: _type = "HttpNfcLease" [ 1949.273840] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1949.708826] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1949.708931] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.199s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.776204] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1949.776204] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d8c4f5-5364-94b0-cc4a-fcb739e0c388" [ 1949.776204] env[62525]: _type = "HttpNfcLease" [ 1949.776204] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1949.776636] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1949.776636] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d8c4f5-5364-94b0-cc4a-fcb739e0c388" [ 1949.776636] env[62525]: _type = "HttpNfcLease" [ 1949.776636] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1949.777498] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d340258-2897-4961-8646-430a8525b5ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.785152] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525d4508-14b5-43fa-258f-6fe0af28f96b/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1949.785338] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525d4508-14b5-43fa-258f-6fe0af28f96b/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1949.892999] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-26030817-5ac9-40f2-9b94-5c49a4221500 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.403036] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a316aec-ff96-4424-8e23-c6bf5b64a3b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.410890] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-721a7270-688a-43d0-b621-01cac8357b7f tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Suspending the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1950.411402] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-0f070706-ebe6-4f07-af5f-463ed6ae15d7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.418638] env[62525]: DEBUG oslo_vmware.api [None req-721a7270-688a-43d0-b621-01cac8357b7f tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1950.418638] env[62525]: value = "task-1782341" [ 1950.418638] env[62525]: _type = "Task" [ 1950.418638] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.427666] env[62525]: DEBUG oslo_vmware.api [None req-721a7270-688a-43d0-b621-01cac8357b7f tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782341, 'name': SuspendVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.929802] env[62525]: DEBUG oslo_vmware.api [None req-721a7270-688a-43d0-b621-01cac8357b7f tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782341, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.433459] env[62525]: DEBUG oslo_vmware.api [None req-721a7270-688a-43d0-b621-01cac8357b7f tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782341, 'name': SuspendVM_Task, 'duration_secs': 0.629861} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.434790] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-721a7270-688a-43d0-b621-01cac8357b7f tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Suspended the VM {{(pid=62525) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1951.434790] env[62525]: DEBUG nova.compute.manager [None req-721a7270-688a-43d0-b621-01cac8357b7f tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1951.435749] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1feb393c-90dc-4bea-ac62-465329a5879c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.778514] env[62525]: INFO nova.compute.manager [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Resuming [ 1952.779234] env[62525]: DEBUG nova.objects.instance [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'flavor' on Instance uuid abd538d5-f433-4896-9871-5cdef303cda0 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1953.788213] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.788556] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquired lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.788644] env[62525]: DEBUG nova.network.neutron [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1954.516532] env[62525]: DEBUG nova.network.neutron [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updating instance_info_cache with network_info: [{"id": "6d956a60-0763-439d-9f38-a05ab94cca9f", "address": 
"fa:16:3e:fe:0f:d4", "network": {"id": "3832b5f8-2732-49d6-b0aa-ca9724193776", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2021389950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3900af0b29fa40beb95a4260054c8e5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d956a60-07", "ovs_interfaceid": "6d956a60-0763-439d-9f38-a05ab94cca9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.019581] env[62525]: DEBUG oslo_concurrency.lockutils [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Releasing lock "refresh_cache-abd538d5-f433-4896-9871-5cdef303cda0" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.020743] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f0e895-81d1-4287-b96c-d783410745b3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.028543] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Resuming the VM {{(pid=62525) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1955.028857] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-312f94be-6bff-4d62-b29e-79f4d2cffd53 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.035967] env[62525]: DEBUG oslo_vmware.api [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1955.035967] env[62525]: value = "task-1782342" [ 1955.035967] env[62525]: _type = "Task" [ 1955.035967] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.044217] env[62525]: DEBUG oslo_vmware.api [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782342, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.343892] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52157aa2-3b5c-2fcc-4725-9874ea35fe90/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1955.344889] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44af9fee-5c65-453a-857f-595ac9742009 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.351274] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52157aa2-3b5c-2fcc-4725-9874ea35fe90/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1955.351466] env[62525]: ERROR oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52157aa2-3b5c-2fcc-4725-9874ea35fe90/disk-0.vmdk due to incomplete transfer. [ 1955.351708] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f6cea528-6cd3-4979-8911-0e6610813ce6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.359730] env[62525]: DEBUG oslo_vmware.rw_handles [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52157aa2-3b5c-2fcc-4725-9874ea35fe90/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1955.359915] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Uploaded image 10429895-8278-4c9a-a80c-8a0d17d7ac47 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1955.362445] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1955.362762] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-73f2763e-e6ca-42f4-89f9-2103564d3bd8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.369035] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1955.369035] env[62525]: value = "task-1782343" [ 1955.369035] env[62525]: _type = "Task" [ 1955.369035] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.381893] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782343, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.548905] env[62525]: DEBUG oslo_vmware.api [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782342, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.879615] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782343, 'name': Destroy_Task, 'duration_secs': 0.482233} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.879854] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Destroyed the VM [ 1955.880101] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1955.880387] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b261c088-8c7f-49ec-9ec5-5ec593088427 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.886998] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1955.886998] env[62525]: value = "task-1782344" [ 1955.886998] env[62525]: _type = "Task" [ 1955.886998] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.895368] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782344, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.046223] env[62525]: DEBUG oslo_vmware.api [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782342, 'name': PowerOnVM_Task, 'duration_secs': 0.534625} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.046610] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Resumed the VM {{(pid=62525) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1956.046712] env[62525]: DEBUG nova.compute.manager [None req-9186ac4a-7c75-4039-a4bb-35d32b7afa10 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1956.047491] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7659503b-5432-4ca2-a536-8968a3cb72cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.397057] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782344, 'name': RemoveSnapshot_Task, 'duration_secs': 0.450687} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.397277] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1956.397565] env[62525]: DEBUG nova.compute.manager [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1956.398346] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299f3815-30e0-42a5-b0fd-a294572f1eba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.911343] env[62525]: INFO nova.compute.manager [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Shelve offloading [ 1956.913082] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1956.913306] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91dfc96f-679e-4a03-945a-80ef2f870850 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.921034] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 
tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1956.921034] env[62525]: value = "task-1782345" [ 1956.921034] env[62525]: _type = "Task" [ 1956.921034] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.929460] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782345, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.436699] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1957.437139] env[62525]: DEBUG nova.compute.manager [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1957.438043] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd769d5-0c6f-4d44-a01e-b62dfa650107 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.445511] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.445744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.446064] env[62525]: DEBUG nova.network.neutron [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1957.513978] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "abd538d5-f433-4896-9871-5cdef303cda0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.514280] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock 
"abd538d5-f433-4896-9871-5cdef303cda0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.514474] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "abd538d5-f433-4896-9871-5cdef303cda0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.514692] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "abd538d5-f433-4896-9871-5cdef303cda0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.514868] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "abd538d5-f433-4896-9871-5cdef303cda0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.516926] env[62525]: INFO nova.compute.manager [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Terminating instance [ 1957.518717] env[62525]: DEBUG nova.compute.manager [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1957.518913] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1957.519754] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156fca1e-1089-4f87-9c92-8865c058283d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.527856] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1957.528109] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a788dc04-d872-4d05-accb-ebe2a57ca771 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.534766] env[62525]: DEBUG oslo_vmware.api [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1957.534766] env[62525]: value = "task-1782346" [ 1957.534766] env[62525]: _type = "Task" [ 1957.534766] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.542489] env[62525]: DEBUG oslo_vmware.api [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782346, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.905373] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525d4508-14b5-43fa-258f-6fe0af28f96b/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1957.906323] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5031fa-46e8-4bad-852e-364f0a9f9f25 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.912430] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525d4508-14b5-43fa-258f-6fe0af28f96b/disk-0.vmdk is in state: ready. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1957.912593] env[62525]: ERROR oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525d4508-14b5-43fa-258f-6fe0af28f96b/disk-0.vmdk due to incomplete transfer. [ 1957.912804] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d256edf8-08f0-4c68-9c0d-2a0eecc13145 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.919570] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525d4508-14b5-43fa-258f-6fe0af28f96b/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1957.919762] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Uploaded image 4f6edf16-9b31-4c97-b7c6-52b755e6445a to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1957.921451] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1957.921677] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-eab677b5-9b27-4d2f-b3de-7d38f9b5145e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.927483] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1957.927483] env[62525]: value = "task-1782347" [ 1957.927483] env[62525]: _type = "Task" [ 1957.927483] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.935144] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782347, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.044732] env[62525]: DEBUG oslo_vmware.api [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782346, 'name': PowerOffVM_Task, 'duration_secs': 0.192411} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.045355] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1958.045355] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1958.045517] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4347770f-088a-4e66-b8d3-8b9ce02ac26e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.113171] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1958.113485] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1958.113603] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleting the datastore file [datastore1] abd538d5-f433-4896-9871-5cdef303cda0 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1958.113883] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce87b142-4885-462e-896f-51728d1114d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.122114] env[62525]: DEBUG oslo_vmware.api [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for the task: (returnval){ [ 1958.122114] env[62525]: value = "task-1782349" [ 1958.122114] env[62525]: _type = "Task" [ 1958.122114] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.129745] env[62525]: DEBUG oslo_vmware.api [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782349, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.187634] env[62525]: DEBUG nova.network.neutron [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.437233] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782347, 'name': Destroy_Task, 'duration_secs': 0.35631} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.437597] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Destroyed the VM [ 1958.437822] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1958.438083] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0304335e-9992-40c9-9661-68cd962f0ec2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.444278] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1958.444278] env[62525]: value = "task-1782350" [ 1958.444278] env[62525]: _type = "Task" [ 1958.444278] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.452088] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782350, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.631819] env[62525]: DEBUG oslo_vmware.api [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Task: {'id': task-1782349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181568} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.632097] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1958.632284] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1958.632461] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1958.632667] env[62525]: INFO nova.compute.manager [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1958.632905] env[62525]: DEBUG oslo.service.loopingcall [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1958.633114] env[62525]: DEBUG nova.compute.manager [-] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1958.633210] env[62525]: DEBUG nova.network.neutron [-] [instance: abd538d5-f433-4896-9871-5cdef303cda0] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1958.690298] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1958.954264] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782350, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.099461] env[62525]: DEBUG nova.compute.manager [req-10ef5b38-5c6c-457c-aa69-d7f1b38c0fd1 req-4757d206-30db-4e38-802d-acf97cefaf88 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-vif-unplugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1959.099461] env[62525]: DEBUG oslo_concurrency.lockutils [req-10ef5b38-5c6c-457c-aa69-d7f1b38c0fd1 req-4757d206-30db-4e38-802d-acf97cefaf88 service nova] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.099687] env[62525]: DEBUG oslo_concurrency.lockutils [req-10ef5b38-5c6c-457c-aa69-d7f1b38c0fd1 req-4757d206-30db-4e38-802d-acf97cefaf88 service nova] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.099758] env[62525]: DEBUG oslo_concurrency.lockutils [req-10ef5b38-5c6c-457c-aa69-d7f1b38c0fd1 req-4757d206-30db-4e38-802d-acf97cefaf88 service nova] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.102188] env[62525]: DEBUG nova.compute.manager [req-10ef5b38-5c6c-457c-aa69-d7f1b38c0fd1 req-4757d206-30db-4e38-802d-acf97cefaf88 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] No waiting events found dispatching network-vif-unplugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1959.102188] env[62525]: WARNING nova.compute.manager [req-10ef5b38-5c6c-457c-aa69-d7f1b38c0fd1 req-4757d206-30db-4e38-802d-acf97cefaf88 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received unexpected event network-vif-unplugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 for 
instance with vm_state shelved and task_state shelving_offloading. [ 1959.424924] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1959.426106] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0abf99a-f083-4ebd-88ea-48ce144b3d79 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.433652] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1959.434664] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab68114d-9588-48d3-bcf2-dac67f6fcdbf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.456482] env[62525]: DEBUG oslo_vmware.api [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782350, 'name': RemoveSnapshot_Task, 'duration_secs': 0.524251} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.456740] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1959.456824] env[62525]: INFO nova.compute.manager [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Took 14.34 seconds to snapshot the instance on the hypervisor. 
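Editor's note: the records above and below repeat the same vCenter task-polling pattern: a call submits a task (PowerOnVM_Task, Destroy_Task, RemoveSnapshot_Task, PowerOffVM_Task, DeleteDatastoreFile_Task), then wait_for_task polls it and logs "progress is N%" until it reports "completed successfully". The following is a minimal, self-contained sketch of that loop only; FakeTask, wait_for_task and TaskFailed are illustrative names invented for the example, not the oslo.vmware API.

# Illustrative sketch of the task-polling pattern seen in the surrounding log
# records. All names here are made up for the example.
import time


class TaskFailed(Exception):
    """Raised when the polled task finishes in an error state."""


class FakeTask:
    """Stand-in for a vCenter task that reaches 100% after a few polls."""

    def __init__(self, name):
        self.name = name
        self.progress = 0

    def poll(self):
        # Each poll advances the fake task; a real client would read the
        # task's state and progress back from the server instead.
        self.progress = min(self.progress + 50, 100)
        state = "success" if self.progress == 100 else "running"
        return state, self.progress


def wait_for_task(task, interval=0.5, timeout=60.0):
    """Poll `task` until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while True:
        state, progress = task.poll()
        print(f"Task {task.name} progress is {progress}%")
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(task.name)
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task.name} did not finish in {timeout}s")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"), interval=0.1)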
[ 1959.517030] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1959.517030] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1959.517030] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleting the datastore file [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1959.517304] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1dab30c-d0b0-4518-81aa-0a8d5c4f1150 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.524524] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1959.524524] env[62525]: value = "task-1782352" [ 1959.524524] env[62525]: _type = "Task" [ 1959.524524] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.532111] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782352, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.963434] env[62525]: DEBUG nova.network.neutron [-] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.019792] env[62525]: DEBUG nova.compute.manager [None req-3b337daf-3305-4bb6-a983-44761c5714b1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Found 1 images (rotation: 2) {{(pid=62525) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1960.034431] env[62525]: DEBUG oslo_vmware.api [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1324} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.035170] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1960.035170] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1960.035170] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1960.063792] env[62525]: INFO nova.scheduler.client.report [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted allocations for instance 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 [ 1960.468048] env[62525]: INFO nova.compute.manager [-] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Took 1.83 seconds to deallocate network for instance. [ 1960.568172] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.568405] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.568641] env[62525]: DEBUG nova.objects.instance [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'resources' on Instance uuid 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1960.575108] env[62525]: DEBUG nova.compute.manager [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1960.576033] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67aa38e-936a-4c9b-9c8c-9bf106f2d917 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.974586] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.071620] env[62525]: DEBUG nova.objects.instance [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'numa_topology' on Instance uuid 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1961.086068] env[62525]: INFO nova.compute.manager [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] instance snapshotting [ 1961.086712] env[62525]: DEBUG nova.objects.instance [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'flavor' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1961.127293] env[62525]: DEBUG nova.compute.manager [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1961.127536] env[62525]: DEBUG nova.compute.manager [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing instance network info cache due to event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1961.127799] env[62525]: DEBUG oslo_concurrency.lockutils [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.127982] env[62525]: DEBUG oslo_concurrency.lockutils [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.128185] env[62525]: DEBUG nova.network.neutron [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1961.574739] env[62525]: DEBUG nova.objects.base [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Object Instance<0e18bcbf-1b77-46cb-99d4-62668b9c8a55> lazy-loaded attributes: resources,numa_topology {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1961.592232] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed361a4f-73a3-4502-b863-6a3b0b387aca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.615868] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664e808e-ec08-4f40-8b25-0e8524850de3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.660073] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9aa151-49d5-439d-bea6-10ffa6a7d89b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.669373] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e208e1-8115-4c1e-9146-f53d2b525f9b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.702085] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f9fc9e-24fd-47d7-a90f-2038ff980ad0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.709781] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee3fc8e-a57d-4d0c-8daf-483339fa3107 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.727015] env[62525]: DEBUG nova.compute.provider_tree [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.862881] env[62525]: DEBUG 
nova.network.neutron [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updated VIF entry in instance network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1961.863292] env[62525]: DEBUG nova.network.neutron [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.126225] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1962.126514] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8928ab7c-5842-42c9-b3ed-58b66851853c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.134900] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1962.134900] env[62525]: value = "task-1782353" [ 1962.134900] env[62525]: _type = "Task" [ 1962.134900] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.144890] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782353, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.230758] env[62525]: DEBUG nova.scheduler.client.report [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1962.366462] env[62525]: DEBUG oslo_concurrency.lockutils [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.366715] env[62525]: DEBUG nova.compute.manager [req-6485d414-7ae5-4f48-afa4-c27e6504a738 req-17eb827e-da91-4e1c-9709-395b15b4f63e service nova] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Received event network-vif-deleted-6d956a60-0763-439d-9f38-a05ab94cca9f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1962.645176] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782353, 'name': CreateSnapshot_Task, 'duration_secs': 0.426192} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.645525] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1962.646198] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290e7c08-3f41-4c17-843f-503e305c1fed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.735356] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.167s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.737770] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.763s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.737994] env[62525]: DEBUG nova.objects.instance [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lazy-loading 'resources' on Instance uuid abd538d5-f433-4896-9871-5cdef303cda0 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.997380] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.162845] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1963.163187] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d0bbec4a-a795-4d5e-b8e6-3c8bf5e6d2ee {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.172715] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1963.172715] env[62525]: value = "task-1782354" [ 1963.172715] env[62525]: _type = "Task" [ 1963.172715] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.180417] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782354, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.245744] env[62525]: DEBUG oslo_concurrency.lockutils [None req-531a29d5-d1f8-486b-b3fb-a2b65ef1d1aa tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.544s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.246776] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.249s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.246776] env[62525]: INFO nova.compute.manager [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Unshelving [ 1963.323191] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb97d446-30d3-47c4-bf28-929cfdc7bec2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.332194] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35f4df2-42e9-4a32-9a11-49eb3e2d87f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.365671] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7356e61a-b14e-42f2-abc3-8b9ec67d2e72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.373543] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251bb456-043c-41ea-ab10-9562ee37d709 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.386799] env[62525]: DEBUG nova.compute.provider_tree [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.683485] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782354, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.889964] env[62525]: DEBUG nova.scheduler.client.report [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1964.182983] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782354, 'name': CloneVM_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.267674] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.394922] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.397298] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.130s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.397531] env[62525]: DEBUG nova.objects.instance [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'pci_requests' on Instance uuid 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1964.411352] env[62525]: INFO nova.scheduler.client.report [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Deleted allocations for instance abd538d5-f433-4896-9871-5cdef303cda0 [ 1964.684443] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782354, 'name': CloneVM_Task, 'duration_secs': 1.042165} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.684836] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Created linked-clone VM from snapshot [ 1964.685527] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f508c0a-43bd-42b6-9c3b-9e0e803d8586 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.692793] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Uploading image de336897-32c6-47c1-9b97-3fc23d47285b {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1964.713081] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1964.713081] env[62525]: value = "vm-369878" [ 1964.713081] env[62525]: _type = "VirtualMachine" [ 1964.713081] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1964.713457] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-533a2e0a-dc78-41c0-b0c7-ca38c5732047 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.720377] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease: (returnval){ [ 1964.720377] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528e538d-3a2f-ec60-eb2e-54701f3ab784" [ 1964.720377] env[62525]: _type = "HttpNfcLease" [ 1964.720377] env[62525]: } obtained for exporting VM: (result){ [ 1964.720377] env[62525]: value = "vm-369878" [ 1964.720377] env[62525]: _type = "VirtualMachine" [ 1964.720377] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1964.720660] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the lease: (returnval){ [ 1964.720660] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528e538d-3a2f-ec60-eb2e-54701f3ab784" [ 1964.720660] env[62525]: _type = "HttpNfcLease" [ 1964.720660] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1964.727323] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1964.727323] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528e538d-3a2f-ec60-eb2e-54701f3ab784" [ 1964.727323] env[62525]: _type = "HttpNfcLease" [ 1964.727323] env[62525]: } is initializing. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1964.901589] env[62525]: DEBUG nova.objects.instance [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'numa_topology' on Instance uuid 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1964.918889] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e94a78c1-e5a8-4a16-9e22-6ae681a51521 tempest-ServerActionsTestJSON-1523578820 tempest-ServerActionsTestJSON-1523578820-project-member] Lock "abd538d5-f433-4896-9871-5cdef303cda0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.404s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.238187] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1965.238187] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528e538d-3a2f-ec60-eb2e-54701f3ab784" [ 1965.238187] env[62525]: _type = "HttpNfcLease" [ 1965.238187] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1965.238448] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1965.238448] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]528e538d-3a2f-ec60-eb2e-54701f3ab784" [ 1965.238448] env[62525]: _type = "HttpNfcLease" [ 1965.238448] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1965.239223] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816abe96-3211-4806-92b1-258357382e9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.248658] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e6b360-13f2-e9fb-8e48-9d2e294d2724/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1965.248658] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e6b360-13f2-e9fb-8e48-9d2e294d2724/disk-0.vmdk for reading. 
{{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1965.345983] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5d0221c7-9e43-4c42-b675-94f1fbd5591f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.404392] env[62525]: INFO nova.compute.claims [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1966.506032] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e840ec86-cbac-4fdf-901b-86cb96e3f52b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.514183] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6431d8-c96d-45f0-9145-4439f7bf75f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.546588] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a099f10-f7fe-4b98-8bbc-fde6943aa540 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.554857] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdec1740-455b-4ff5-b272-b4ac4463ff67 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.569093] env[62525]: DEBUG nova.compute.provider_tree [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1967.072077] env[62525]: DEBUG nova.scheduler.client.report [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1967.578334] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.181s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.611081] env[62525]: INFO nova.network.neutron [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 
tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating port bceaa7b6-06fc-45f3-be4d-d376a854cc39 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1969.030468] env[62525]: DEBUG nova.compute.manager [req-8ee78337-5858-49de-b3d0-eebf73f74ad1 req-9585b3c5-554c-4838-aecd-17d1efbc6bb1 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-vif-plugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1969.030717] env[62525]: DEBUG oslo_concurrency.lockutils [req-8ee78337-5858-49de-b3d0-eebf73f74ad1 req-9585b3c5-554c-4838-aecd-17d1efbc6bb1 service nova] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.030917] env[62525]: DEBUG oslo_concurrency.lockutils [req-8ee78337-5858-49de-b3d0-eebf73f74ad1 req-9585b3c5-554c-4838-aecd-17d1efbc6bb1 service nova] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.031505] env[62525]: DEBUG oslo_concurrency.lockutils [req-8ee78337-5858-49de-b3d0-eebf73f74ad1 req-9585b3c5-554c-4838-aecd-17d1efbc6bb1 service nova] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.031913] env[62525]: DEBUG nova.compute.manager [req-8ee78337-5858-49de-b3d0-eebf73f74ad1 req-9585b3c5-554c-4838-aecd-17d1efbc6bb1 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] No waiting events found dispatching network-vif-plugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1969.032056] env[62525]: WARNING nova.compute.manager [req-8ee78337-5858-49de-b3d0-eebf73f74ad1 req-9585b3c5-554c-4838-aecd-17d1efbc6bb1 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received unexpected event network-vif-plugged-bceaa7b6-06fc-45f3-be4d-d376a854cc39 for instance with vm_state shelved_offloaded and task_state spawning. 
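
The repeated "Acquiring lock ... / Lock ... acquired by ... waited N.NNNs / released by ... held N.NNNs" records around the resource-tracker and cache-refresh paths above are emitted by oslo.concurrency's lockutils wrapper. A minimal, hypothetical sketch of that pattern (not Nova's actual code; the function bodies are placeholders, and the lock names are copied from the log only for illustration):

```python
from oslo_concurrency import lockutils

# Decorator form: the wrapped function runs only while the named lock is held.
# lockutils logs the acquire/release events at DEBUG, including how long the
# caller waited and how long the lock was held -- the lines seen in this log.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder for resource-tracker bookkeeping


# Context-manager form, e.g. around an instance network-info cache refresh.
with lockutils.lock('refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55'):
    pass  # placeholder for rebuilding the instance network info cache
```
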
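The "Task: {'id': task-17823xx, 'name': CloneVM_Task} progress is N%" records in this section come from oslo.vmware's task poller while the driver waits for a vCenter task to complete. A minimal sketch of that pattern, assuming the usual oslo.vmware session API; the host, credentials, managed-object references and clone spec below are placeholders, not values taken from this environment:

```python
from oslo_vmware import api

# Placeholder connection details -- not taken from this log.
session = api.VMwareAPISession(
    'vcenter.example.org', 'administrator', 'password',
    api_retry_count=10,
    task_poll_interval=0.5)  # how often wait_for_task() polls and logs progress

# Placeholder managed-object references and spec; in Nova these come from
# vm_util / ds_util helpers.
vm_ref = folder_ref = clone_spec = None

# invoke_api() issues the SOAP call and returns a task moref; wait_for_task()
# then polls it, logging "progress is N%" at DEBUG until the task reports
# success, and raises if vCenter reports an error.
task_ref = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name='linked-clone',
                              spec=clone_spec)
task_info = session.wait_for_task(task_ref)
```
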
[ 1969.127810] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.128061] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.128291] env[62525]: DEBUG nova.network.neutron [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1969.849367] env[62525]: DEBUG nova.network.neutron [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.352914] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.375660] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9c18e5350457980fddf64e6c14c7efb4',container_format='bare',created_at=2024-12-12T00:20:26Z,direct_url=,disk_format='vmdk',id=10429895-8278-4c9a-a80c-8a0d17d7ac47,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-17594542-shelved',owner='209b99adb38b4c8b9e5a277019dbe292',properties=ImageMetaProps,protected=,size=31670784,status='active',tags=,updated_at=2024-12-12T00:20:39Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1970.375932] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1970.376127] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1970.376322] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1970.376476] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1970.376619] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1970.376844] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1970.377016] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1970.377199] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 1970.377362] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1970.377532] env[62525]: DEBUG nova.virt.hardware [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1970.378427] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801f1173-8178-4d16-b0bc-c3871ad0c75e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.387638] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e080fd-ca24-4295-acad-6f89ec26d1af {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.401612] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:96:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bceaa7b6-06fc-45f3-be4d-d376a854cc39', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1970.409018] env[62525]: DEBUG oslo.service.loopingcall [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1970.409296] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1970.409506] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45437999-ee8f-4254-ae48-5fc581ff02cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.428145] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1970.428145] env[62525]: value = "task-1782356" [ 1970.428145] env[62525]: _type = "Task" [ 1970.428145] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.435765] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782356, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.938558] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782356, 'name': CreateVM_Task, 'duration_secs': 0.363703} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.938735] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1970.939387] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.939558] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.939975] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1970.940254] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb7d4b83-dabe-4c07-af68-2ffa69f72064 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.945922] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1970.945922] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52025077-8309-2a13-07cf-248bbb28f2f5" [ 1970.945922] env[62525]: _type = "Task" [ 1970.945922] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.954565] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52025077-8309-2a13-07cf-248bbb28f2f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.056092] env[62525]: DEBUG nova.compute.manager [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1971.056286] env[62525]: DEBUG nova.compute.manager [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing instance network info cache due to event network-changed-bceaa7b6-06fc-45f3-be4d-d376a854cc39. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1971.056524] env[62525]: DEBUG oslo_concurrency.lockutils [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.056671] env[62525]: DEBUG oslo_concurrency.lockutils [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.056829] env[62525]: DEBUG nova.network.neutron [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Refreshing network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1971.457809] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.458254] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Processing image 10429895-8278-4c9a-a80c-8a0d17d7ac47 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1971.458331] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47/10429895-8278-4c9a-a80c-8a0d17d7ac47.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.458430] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47/10429895-8278-4c9a-a80c-8a0d17d7ac47.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.458608] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1971.458855] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-463c5d1d-cbe2-46ff-aa75-2a09203f2e7c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.468178] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1971.468380] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1971.469176] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24b2f395-3e23-492e-a077-d720a53f4d09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.474426] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1971.474426] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52a5d6da-4750-7a38-9bb2-8479dc7de1d3" [ 1971.474426] env[62525]: _type = "Task" [ 1971.474426] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.482351] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52a5d6da-4750-7a38-9bb2-8479dc7de1d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.779174] env[62525]: DEBUG nova.network.neutron [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updated VIF entry in instance network info cache for port bceaa7b6-06fc-45f3-be4d-d376a854cc39. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1971.779593] env[62525]: DEBUG nova.network.neutron [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1971.984570] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Preparing fetch location {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1971.984832] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Fetch image to [datastore1] OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe/OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe.vmdk {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1971.985058] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Downloading stream optimized image 10429895-8278-4c9a-a80c-8a0d17d7ac47 to [datastore1] OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe/OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe.vmdk on the data store datastore1 as vApp {{(pid=62525) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1971.985241] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Downloading image file data 10429895-8278-4c9a-a80c-8a0d17d7ac47 to the ESX as VM named 'OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe' {{(pid=62525) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1972.054618] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1972.054618] env[62525]: value = "resgroup-9" [ 1972.054618] env[62525]: _type = "ResourcePool" [ 1972.054618] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1972.055056] env[62525]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-bbbd0dd2-78c4-4b12-9436-dbd18e560906 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.079717] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease: (returnval){ [ 1972.079717] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527b8074-ead9-f4c2-7c31-4bb2f74fbb1b" [ 1972.079717] env[62525]: _type = "HttpNfcLease" [ 1972.079717] env[62525]: } obtained for vApp import into resource pool (val){ [ 1972.079717] env[62525]: value = "resgroup-9" [ 1972.079717] env[62525]: _type = "ResourcePool" [ 1972.079717] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1972.080135] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the lease: (returnval){ [ 1972.080135] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527b8074-ead9-f4c2-7c31-4bb2f74fbb1b" [ 1972.080135] env[62525]: _type = "HttpNfcLease" [ 1972.080135] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1972.090494] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1972.090494] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527b8074-ead9-f4c2-7c31-4bb2f74fbb1b" [ 1972.090494] env[62525]: _type = "HttpNfcLease" [ 1972.090494] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1972.282758] env[62525]: DEBUG oslo_concurrency.lockutils [req-d1309764-bc32-4198-a1b6-bd363ce3473d req-cd810d0d-6a61-4248-a8b8-70f83b235541 service nova] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.590074] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1972.590074] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527b8074-ead9-f4c2-7c31-4bb2f74fbb1b" [ 1972.590074] env[62525]: _type = "HttpNfcLease" [ 1972.590074] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1973.089623] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1973.089623] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527b8074-ead9-f4c2-7c31-4bb2f74fbb1b" [ 1973.089623] env[62525]: _type = "HttpNfcLease" [ 1973.089623] env[62525]: } is ready. 
{{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1973.089927] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1973.089927] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527b8074-ead9-f4c2-7c31-4bb2f74fbb1b" [ 1973.089927] env[62525]: _type = "HttpNfcLease" [ 1973.089927] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1973.090921] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dfab6a-875d-4db0-8158-c4475675b37c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.098512] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52699eb5-7564-0f30-9885-09300d3e7a49/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1973.098699] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating HTTP connection to write to file with size = 31670784 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52699eb5-7564-0f30-9885-09300d3e7a49/disk-0.vmdk. {{(pid=62525) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1973.165421] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-25bf8b42-4de3-417f-a254-5df04061a13b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.795423] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e6b360-13f2-e9fb-8e48-9d2e294d2724/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1973.796467] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7aeb43-77ce-4c15-bcd7-b8c127198f1c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.804701] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e6b360-13f2-e9fb-8e48-9d2e294d2724/disk-0.vmdk is in state: ready. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1973.804879] env[62525]: ERROR oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e6b360-13f2-e9fb-8e48-9d2e294d2724/disk-0.vmdk due to incomplete transfer. [ 1973.805165] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f72e3d6f-697c-47a7-8cb7-bdda46e97cea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.821686] env[62525]: DEBUG oslo_vmware.rw_handles [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e6b360-13f2-e9fb-8e48-9d2e294d2724/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1973.821904] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Uploaded image de336897-32c6-47c1-9b97-3fc23d47285b to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1973.824691] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1973.826591] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a11740e8-1edd-474a-a59a-3e4a7638797b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.833427] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1973.833427] env[62525]: value = "task-1782358" [ 1973.833427] env[62525]: _type = "Task" [ 1973.833427] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.843918] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782358, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.340259] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Completed reading data from the image iterator. 
{{(pid=62525) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1974.340482] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52699eb5-7564-0f30-9885-09300d3e7a49/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1974.341274] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b142709-6f3b-4252-b982-410ba3470912 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.349484] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782358, 'name': Destroy_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.351017] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52699eb5-7564-0f30-9885-09300d3e7a49/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1974.351200] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52699eb5-7564-0f30-9885-09300d3e7a49/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1974.351425] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-7e7469ea-795a-4942-baec-fc53a3ef05b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.581522] env[62525]: DEBUG oslo_vmware.rw_handles [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52699eb5-7564-0f30-9885-09300d3e7a49/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1974.581745] env[62525]: INFO nova.virt.vmwareapi.images [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Downloaded image file data 10429895-8278-4c9a-a80c-8a0d17d7ac47 [ 1974.582635] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bc7f55-530d-4bce-aa01-4f155d02389b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.598332] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5089add-781c-4cd2-972a-3d9aee54513b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.802264] env[62525]: INFO nova.virt.vmwareapi.images [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] The imported VM was unregistered [ 1974.804077] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Caching image {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1974.804306] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Creating directory with path [datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1974.804567] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa3dd5d8-b3c3-4bc3-8613-f52d38836dfc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.843976] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782358, 'name': Destroy_Task} progress is 100%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.910771] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Created directory with path [datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1974.910981] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe/OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe.vmdk to [datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47/10429895-8278-4c9a-a80c-8a0d17d7ac47.vmdk. 
{{(pid=62525) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1974.911262] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6995cf76-dcd5-46c9-ae56-f0f7a826ed51 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.917920] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1974.917920] env[62525]: value = "task-1782360" [ 1974.917920] env[62525]: _type = "Task" [ 1974.917920] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.925762] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782360, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.146720] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.147054] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.147329] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.147564] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.147776] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.149944] 
env[62525]: INFO nova.compute.manager [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Terminating instance [ 1975.151809] env[62525]: DEBUG nova.compute.manager [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1975.152068] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1975.152326] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d507d9ce-70e3-402b-bd9b-460768712b09 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.158985] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1975.158985] env[62525]: value = "task-1782361" [ 1975.158985] env[62525]: _type = "Task" [ 1975.158985] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.167193] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782361, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.345632] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782358, 'name': Destroy_Task, 'duration_secs': 1.413153} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.345917] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Destroyed the VM [ 1975.346202] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1975.346465] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5348c595-57b5-483f-81f6-fe489c2050c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.352651] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1975.352651] env[62525]: value = "task-1782362" [ 1975.352651] env[62525]: _type = "Task" [ 1975.352651] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.360388] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782362, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.428582] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782360, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.670331] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782361, 'name': PowerOffVM_Task, 'duration_secs': 0.241165} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.670613] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1975.670820] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1975.671024] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369869', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'name': 'volume-004ac126-a2b0-4eff-a790-f50f2497a817', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'ed5234ba-d26b-47da-8c9b-4cc591baf087', 'attached_at': '2024-12-12T00:20:20.000000', 'detached_at': '', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'serial': '004ac126-a2b0-4eff-a790-f50f2497a817'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1975.671830] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98c4cbd-edb7-448b-94b9-956cc4ee7ea2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.693131] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4476200b-41e7-4faf-aaef-d3ebfddec3f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.702334] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaadeefe-3cf7-4181-b044-d5e9349d5c47 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.722818] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14b3db1-32e8-4880-a439-ee1335fb262f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.742858] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] The volume has not been displaced from its original location: [datastore1] volume-004ac126-a2b0-4eff-a790-f50f2497a817/volume-004ac126-a2b0-4eff-a790-f50f2497a817.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1975.748516] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1975.748636] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2da65185-3b35-400c-93f2-7ba0c295df59 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.769475] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1975.769475] env[62525]: value = "task-1782363" [ 1975.769475] env[62525]: _type = "Task" [ 1975.769475] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.779902] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782363, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.864599] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782362, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.930246] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782360, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.282615] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782363, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.364838] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782362, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.431033] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782360, 'name': MoveVirtualDisk_Task} progress is 52%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.782630] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782363, 'name': ReconfigVM_Task, 'duration_secs': 0.99769} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.782954] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1976.787819] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6917451-197d-4f9f-ab1b-1b9508613c53 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.806138] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1976.806138] env[62525]: value = "task-1782364" [ 1976.806138] env[62525]: _type = "Task" [ 1976.806138] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.816962] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782364, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.867890] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782362, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.931355] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782360, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.318766] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782364, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.367621] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782362, 'name': RemoveSnapshot_Task} progress is 16%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.431792] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782360, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.817663] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782364, 'name': ReconfigVM_Task, 'duration_secs': 0.573094} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.817965] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369869', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'name': 'volume-004ac126-a2b0-4eff-a790-f50f2497a817', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'ed5234ba-d26b-47da-8c9b-4cc591baf087', 'attached_at': '2024-12-12T00:20:20.000000', 'detached_at': '', 'volume_id': '004ac126-a2b0-4eff-a790-f50f2497a817', 'serial': '004ac126-a2b0-4eff-a790-f50f2497a817'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1977.818251] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1977.818988] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b152b2-c3ca-4bf2-a427-e9ac22b4dca0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.825161] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1977.825377] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9b14ac4-8fba-46c5-bcb1-ba16d86c94ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.863823] env[62525]: DEBUG oslo_vmware.api [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782362, 'name': RemoveSnapshot_Task, 'duration_secs': 2.368428} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.864075] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1977.864319] env[62525]: INFO nova.compute.manager [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Took 16.27 seconds to snapshot the instance on the hypervisor. [ 1977.890806] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1977.891148] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1977.891282] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleting the datastore file [datastore1] ed5234ba-d26b-47da-8c9b-4cc591baf087 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1977.891489] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd24c818-291d-4660-bbf7-2d4588f47f92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.899051] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1977.899051] env[62525]: value = "task-1782366" [ 1977.899051] env[62525]: _type = "Task" [ 1977.899051] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.906925] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.929159] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782360, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.594745} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.929390] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe/OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe.vmdk to [datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47/10429895-8278-4c9a-a80c-8a0d17d7ac47.vmdk. [ 1977.929571] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Cleaning up location [datastore1] OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1977.929730] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_43181403-39b8-4a69-97d0-4009ac7d9cbe {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1977.929975] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b1a4037-567a-45df-8527-87d4e88af1a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.935495] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1977.935495] env[62525]: value = "task-1782367" [ 1977.935495] env[62525]: _type = "Task" [ 1977.935495] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.944045] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.406679] env[62525]: DEBUG nova.compute.manager [None req-f203cda5-156c-48fd-9e8d-58e91f3f6820 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Found 2 images (rotation: 2) {{(pid=62525) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1978.413094] env[62525]: DEBUG oslo_vmware.api [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098152} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.413331] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1978.413509] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1978.413837] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1978.413837] env[62525]: INFO nova.compute.manager [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Took 3.26 seconds to destroy the instance on the hypervisor. [ 1978.414068] env[62525]: DEBUG oslo.service.loopingcall [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1978.414387] env[62525]: DEBUG nova.compute.manager [-] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1978.414387] env[62525]: DEBUG nova.network.neutron [-] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1978.444323] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037897} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.444555] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1978.444717] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47/10429895-8278-4c9a-a80c-8a0d17d7ac47.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.445636] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47/10429895-8278-4c9a-a80c-8a0d17d7ac47.vmdk to [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1978.445925] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73cb33ef-2df7-4691-81aa-fe6aa4d189b5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.452196] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1978.452196] env[62525]: value = "task-1782368" [ 1978.452196] env[62525]: _type = "Task" [ 1978.452196] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.459541] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782368, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.907383] env[62525]: DEBUG nova.compute.manager [req-ecfee9c6-5b84-48e2-ac15-1608723fc8ab req-0e978d8b-3ac3-4a5b-99d5-ccae6530e56a service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Received event network-vif-deleted-12e7bbdb-87db-4e0a-9d1d-21fc6357160d {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1978.907642] env[62525]: INFO nova.compute.manager [req-ecfee9c6-5b84-48e2-ac15-1608723fc8ab req-0e978d8b-3ac3-4a5b-99d5-ccae6530e56a service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Neutron deleted interface 12e7bbdb-87db-4e0a-9d1d-21fc6357160d; detaching it from the instance and deleting it from the info cache [ 1978.907804] env[62525]: DEBUG nova.network.neutron [req-ecfee9c6-5b84-48e2-ac15-1608723fc8ab req-0e978d8b-3ac3-4a5b-99d5-ccae6530e56a service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.964272] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782368, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.243555] env[62525]: DEBUG nova.compute.manager [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1979.244541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a8845c-cba2-42ed-9693-d6354b150556 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.379628] env[62525]: DEBUG nova.network.neutron [-] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.411244] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d006681-06b1-4a55-86b1-1dfee983f651 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.423530] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5fe72c-16c2-4bc1-8081-01d098aaf6cc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.456847] env[62525]: DEBUG nova.compute.manager [req-ecfee9c6-5b84-48e2-ac15-1608723fc8ab req-0e978d8b-3ac3-4a5b-99d5-ccae6530e56a service nova] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Detach interface failed, port_id=12e7bbdb-87db-4e0a-9d1d-21fc6357160d, reason: Instance ed5234ba-d26b-47da-8c9b-4cc591baf087 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1979.466772] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782368, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.758657] env[62525]: INFO nova.compute.manager [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] instance snapshotting [ 1979.759396] env[62525]: DEBUG nova.objects.instance [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'flavor' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1979.883071] env[62525]: INFO nova.compute.manager [-] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Took 1.47 seconds to deallocate network for instance. [ 1979.968507] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782368, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.265338] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef834c6-e825-49b1-a3cf-6511da1e72b1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.287807] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6615db9e-0be9-42c8-9d47-0ad21852b97e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.429528] env[62525]: INFO nova.compute.manager [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Took 0.55 seconds to detach 1 volumes for instance. [ 1980.431855] env[62525]: DEBUG nova.compute.manager [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Deleting volume: 004ac126-a2b0-4eff-a790-f50f2497a817 {{(pid=62525) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1980.469555] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782368, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.801721] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1980.801923] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-787e7a8c-262c-47aa-b017-5ee2f2b3f1ec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.809271] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1980.809271] env[62525]: value = "task-1782370" [ 1980.809271] env[62525]: _type = "Task" [ 1980.809271] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.817710] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782370, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.968709] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782368, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.300264} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.969097] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/10429895-8278-4c9a-a80c-8a0d17d7ac47/10429895-8278-4c9a-a80c-8a0d17d7ac47.vmdk to [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1980.969777] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd5006e-825e-4ac4-9437-47e93aa4b177 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.975108] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.975474] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.975701] env[62525]: DEBUG nova.objects.instance [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'resources' on Instance uuid ed5234ba-d26b-47da-8c9b-4cc591baf087 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1980.994398] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1980.997727] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de805005-5bd3-497c-8699-d87b13efe30b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.018112] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1981.018112] env[62525]: value = "task-1782371" [ 1981.018112] env[62525]: _type = "Task" [ 1981.018112] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.029537] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782371, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.103375] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db71e408-4a6c-433a-91a7-3b5550e5690e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.118595] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d55737e-c8d8-4823-950b-0bc2ed0c07cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.173363] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2b8030-0815-4e83-bf96-a34e91936776 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.188285] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6b58f3-0164-4cca-9c59-3cc7797e19ef {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.210542] env[62525]: DEBUG nova.compute.provider_tree [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.318479] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782370, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.527822] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782371, 'name': ReconfigVM_Task, 'duration_secs': 0.303064} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.528125] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55/0e18bcbf-1b77-46cb-99d4-62668b9c8a55.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1981.528757] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f041578-473d-441a-885c-f3aa172e4da1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.535269] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1981.535269] env[62525]: value = "task-1782372" [ 1981.535269] env[62525]: _type = "Task" [ 1981.535269] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.542873] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782372, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.717381] env[62525]: DEBUG nova.scheduler.client.report [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1981.818838] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782370, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.046583] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782372, 'name': Rename_Task, 'duration_secs': 0.144994} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.046940] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1982.047095] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa4a1faf-5666-4009-9a9d-47364be30dd4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.053535] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 1982.053535] env[62525]: value = "task-1782373" [ 1982.053535] env[62525]: _type = "Task" [ 1982.053535] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.060551] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.222930] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.247s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.241408] env[62525]: INFO nova.scheduler.client.report [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted allocations for instance ed5234ba-d26b-47da-8c9b-4cc591baf087 [ 1982.320850] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782370, 'name': CreateSnapshot_Task, 'duration_secs': 1.417353} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.321185] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1982.321959] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac95d59a-12cf-4658-9348-cca7dc0f1040 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.563407] env[62525]: DEBUG oslo_vmware.api [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782373, 'name': PowerOnVM_Task, 'duration_secs': 0.479779} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.563698] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1982.659161] env[62525]: DEBUG nova.compute.manager [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1982.660113] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e393caf-9854-4ed0-8c38-6153366db544 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.748727] env[62525]: DEBUG oslo_concurrency.lockutils [None req-83c33d96-d16b-4288-9172-d0f0da3ed676 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "ed5234ba-d26b-47da-8c9b-4cc591baf087" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.602s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.839689] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1982.839946] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8c9084dc-5b29-4181-836a-6bafa29c3bb0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.849408] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1982.849408] env[62525]: value = 
"task-1782374" [ 1982.849408] env[62525]: _type = "Task" [ 1982.849408] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.857363] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782374, 'name': CloneVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.176180] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ea66a5a3-a2ff-4a6f-9d95-b0dc88173b2d tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.929s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.359564] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782374, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.859880] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782374, 'name': CloneVM_Task} progress is 95%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.945814] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.946190] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.946537] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.946787] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.947041] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.949764] env[62525]: INFO nova.compute.manager [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Terminating instance [ 1983.951984] env[62525]: DEBUG nova.compute.manager [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1983.952264] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1983.953481] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72aff9b9-66dc-47cd-a9a0-6eaac6d37bde {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.965404] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1983.965795] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8168347-a829-4f13-8ae0-f2a7dd681d49 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.973350] env[62525]: DEBUG oslo_vmware.api [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1983.973350] env[62525]: value = "task-1782375" [ 1983.973350] env[62525]: _type = "Task" [ 1983.973350] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.981485] env[62525]: DEBUG oslo_vmware.api [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782375, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.360979] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782374, 'name': CloneVM_Task, 'duration_secs': 1.19733} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.361417] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Created linked-clone VM from snapshot [ 1984.361910] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08621c97-1b66-4ea8-b67d-5b4f4598bfa0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.369265] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Uploading image 75c4caab-77a4-49b9-8003-8835132799de {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1984.393625] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1984.393625] env[62525]: value = "vm-369882" [ 1984.393625] env[62525]: _type = "VirtualMachine" [ 1984.393625] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1984.393875] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-30d12bc2-1dca-428b-b949-c9b988584baf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.400039] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease: (returnval){ [ 1984.400039] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525765cd-7b4a-d9b1-429a-55eb0ebe243e" [ 1984.400039] env[62525]: _type = "HttpNfcLease" [ 1984.400039] env[62525]: } obtained for exporting VM: (result){ [ 1984.400039] env[62525]: value = "vm-369882" [ 1984.400039] env[62525]: _type = "VirtualMachine" [ 1984.400039] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1984.400285] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the lease: (returnval){ [ 1984.400285] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525765cd-7b4a-d9b1-429a-55eb0ebe243e" [ 1984.400285] env[62525]: _type = "HttpNfcLease" [ 1984.400285] env[62525]: } to be ready. 
{{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1984.406108] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1984.406108] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525765cd-7b4a-d9b1-429a-55eb0ebe243e" [ 1984.406108] env[62525]: _type = "HttpNfcLease" [ 1984.406108] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1984.488304] env[62525]: DEBUG oslo_vmware.api [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782375, 'name': PowerOffVM_Task, 'duration_secs': 0.164065} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.488719] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.488991] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1984.489370] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d2c8cb9-8832-4113-97c6-4dc5470b05f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.568643] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1984.568984] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1984.569291] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleting the datastore file [datastore1] 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.569653] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eae48be2-d437-46d2-9225-84b9b442aa0d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.577138] env[62525]: DEBUG oslo_vmware.api [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1984.577138] env[62525]: value = 
"task-1782378" [ 1984.577138] env[62525]: _type = "Task" [ 1984.577138] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.587871] env[62525]: DEBUG oslo_vmware.api [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782378, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.909192] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1984.909192] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525765cd-7b4a-d9b1-429a-55eb0ebe243e" [ 1984.909192] env[62525]: _type = "HttpNfcLease" [ 1984.909192] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1984.909473] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1984.909473] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]525765cd-7b4a-d9b1-429a-55eb0ebe243e" [ 1984.909473] env[62525]: _type = "HttpNfcLease" [ 1984.909473] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1984.910219] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b87181-e28a-483a-8b3c-2096de9c643f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.917566] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9715-e968-4cd7-e9b0-eed03d405110/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1984.917734] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9715-e968-4cd7-e9b0-eed03d405110/disk-0.vmdk for reading. {{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1985.006344] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-14a4038a-3e61-4459-ba64-653b2417d76e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.086829] env[62525]: DEBUG oslo_vmware.api [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122212} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.087100] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.087289] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1985.087470] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1985.087639] env[62525]: INFO nova.compute.manager [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1985.087873] env[62525]: DEBUG oslo.service.loopingcall [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1985.088068] env[62525]: DEBUG nova.compute.manager [-] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1985.088164] env[62525]: DEBUG nova.network.neutron [-] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1985.358284] env[62525]: DEBUG nova.compute.manager [req-58f03280-3c6a-439f-b331-b5309a3dd5ae req-524bd909-b2ca-4be3-80d2-be7c52141694 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Received event network-vif-deleted-386f6960-c9a5-4c48-9197-bf7df64deb96 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1985.358487] env[62525]: INFO nova.compute.manager [req-58f03280-3c6a-439f-b331-b5309a3dd5ae req-524bd909-b2ca-4be3-80d2-be7c52141694 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Neutron deleted interface 386f6960-c9a5-4c48-9197-bf7df64deb96; detaching it from the instance and deleting it from the info cache [ 1985.358662] env[62525]: DEBUG nova.network.neutron [req-58f03280-3c6a-439f-b331-b5309a3dd5ae req-524bd909-b2ca-4be3-80d2-be7c52141694 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.834758] env[62525]: DEBUG nova.network.neutron [-] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.861807] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23b57ff7-a995-4906-bf76-472662285322 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.875038] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9d687c-313c-4659-9e9e-95e6e0dc9b30 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.901783] env[62525]: DEBUG nova.compute.manager [req-58f03280-3c6a-439f-b331-b5309a3dd5ae req-524bd909-b2ca-4be3-80d2-be7c52141694 service nova] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Detach interface failed, port_id=386f6960-c9a5-4c48-9197-bf7df64deb96, reason: Instance 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1986.341411] env[62525]: INFO nova.compute.manager [-] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Took 1.25 seconds to deallocate network for instance. 
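The records above repeat one pattern throughout: a vCenter task (PowerOffVM_Task, CloneVM_Task, DeleteDatastoreFile_Task) is started, wait_for_task blocks on it, and _poll_task keeps logging its progress until the task "completed successfully" together with a duration_secs figure. Below is a minimal, self-contained sketch of that kind of polling loop; TaskInfo, fetch_task_info and poll_interval are illustrative stand-ins and not the real oslo.vmware API.

# Illustrative sketch only: a generic "poll until the task completes" loop in the
# style of the wait_for_task/_poll_task records above. TaskInfo and the
# fetch_task_info() callable are hypothetical stand-ins, not oslo.vmware classes.
import time
from dataclasses import dataclass
from typing import Callable

@dataclass
class TaskInfo:
    state: str                 # "running", "success", or "error"
    progress: int              # percent complete, as logged ("progress is 94%")
    error: str | None = None

def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> float:
    """Poll a long-running task until it finishes; return its duration in seconds."""
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        elapsed = time.monotonic() - start
        if info.state == "success":
            return elapsed                       # plays the role of 'duration_secs' in the log
        if info.state == "error":
            raise RuntimeError(f"task failed after {elapsed:.3f}s: {info.error}")
        if elapsed > timeout:
            raise TimeoutError(f"task still at {info.progress}% after {elapsed:.1f}s")
        time.sleep(poll_interval)

In the records above, the successive "progress is N%" lines for task-1782374 are roughly half a second apart, which is the role poll_interval plays in this sketch.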
[ 1986.848888] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.849269] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.849417] env[62525]: DEBUG nova.objects.instance [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'resources' on Instance uuid 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1987.420921] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca1a113-7f28-43db-a2fa-7525f0b5ec07 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.429575] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4afb08-5b14-404a-9522-abadbf28c9eb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.460326] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde22216-21f6-45e2-a6f6-e86454ce0d76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.467899] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567e4f42-e3d4-4bea-b7f3-553678950db4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.482048] env[62525]: DEBUG nova.compute.provider_tree [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1988.005872] env[62525]: ERROR nova.scheduler.client.report [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [req-16e8806b-38f3-48bd-9542-38749a6b27dd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID bb89c0ac-8f56-43c6-9f73-fd897be63424. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-16e8806b-38f3-48bd-9542-38749a6b27dd"}]} [ 1988.023261] env[62525]: DEBUG nova.scheduler.client.report [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Refreshing inventories for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1988.039421] env[62525]: DEBUG nova.scheduler.client.report [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Updating ProviderTree inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1988.039660] env[62525]: DEBUG nova.compute.provider_tree [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1988.051556] env[62525]: DEBUG nova.scheduler.client.report [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Refreshing aggregate associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, aggregates: None {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1988.072796] env[62525]: DEBUG nova.scheduler.client.report [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Refreshing trait associations for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62525) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1988.132476] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4ffda2-06d6-4be8-96ad-6a2466c34e89 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.140186] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ad9841-c35e-4c17-881e-4955ec527fc1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.172108] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a3b9aa-ed76-4bfd-85fa-c6fa98dafb97 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.179847] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2d4f3f-2ec0-47df-976b-771a2668ddd6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.193097] env[62525]: DEBUG nova.compute.provider_tree [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1988.723291] env[62525]: DEBUG nova.scheduler.client.report [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Updated inventory for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with generation 170 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1988.723638] env[62525]: DEBUG nova.compute.provider_tree [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Updating resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 generation from 170 to 171 during operation: update_inventory {{(pid=62525) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1988.723822] env[62525]: DEBUG nova.compute.provider_tree [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Updating inventory in ProviderTree for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1989.228805] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.379s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.246421] env[62525]: INFO nova.scheduler.client.report [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted allocations for instance 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86 [ 1989.754457] env[62525]: DEBUG oslo_concurrency.lockutils [None req-23c7ae32-0268-4944-a358-f5de17a268e6 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.808s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.443616] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9715-e968-4cd7-e9b0-eed03d405110/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1992.444555] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c936f1a-1d36-4710-a30f-fc4c6b0685cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.450422] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9715-e968-4cd7-e9b0-eed03d405110/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1992.450597] env[62525]: ERROR oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9715-e968-4cd7-e9b0-eed03d405110/disk-0.vmdk due to incomplete transfer. [ 1992.450781] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ecd5ab38-fb6b-4f03-8a25-3c4d27aa3abe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.457270] env[62525]: DEBUG oslo_vmware.rw_handles [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529b9715-e968-4cd7-e9b0-eed03d405110/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1992.457469] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Uploaded image 75c4caab-77a4-49b9-8003-8835132799de to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1992.459702] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1992.460278] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9620c0fe-fa50-43bb-b56a-a8e236a78511 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.464930] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1992.464930] env[62525]: value = "task-1782380" [ 1992.464930] env[62525]: _type = "Task" [ 1992.464930] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.472292] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782380, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.974846] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782380, 'name': Destroy_Task, 'duration_secs': 0.395844} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.975141] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Destroyed the VM [ 1992.975386] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1992.975623] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d8109dc3-6d31-466d-93e1-f0344b08892a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.981844] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1992.981844] env[62525]: value = "task-1782381" [ 1992.981844] env[62525]: _type = "Task" [ 1992.981844] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.990952] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782381, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.490983] env[62525]: DEBUG oslo_vmware.api [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782381, 'name': RemoveSnapshot_Task, 'duration_secs': 0.481503} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.492188] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1993.492188] env[62525]: INFO nova.compute.manager [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Took 13.23 seconds to snapshot the instance on the hypervisor. 
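A few records back, the inventory update for resource provider bb89c0ac-8f56-43c6-9f73-fd897be63424 was rejected by Placement with a 409 placement.concurrent_update ("resource provider generation conflict"); the report client then refreshed inventories, aggregates and traits, retried, and the provider generation moved from 170 to 171. The sketch below isolates that optimistic-concurrency retry pattern; FakePlacement, put_inventory and set_inventory_with_retries are hypothetical stand-ins, not the nova scheduler report client or the real Placement HTTP API.

# Illustrative sketch of the generation-based optimistic concurrency seen above:
# an inventory update carries the provider generation; a mismatch yields a 409
# "concurrent_update", after which the client refreshes its view and retries.
class ConflictError(Exception):
    pass

class FakePlacement:
    """In-memory stand-in for the Placement service."""
    def __init__(self):
        self.generation = 170
        self.inventory = {}

    def put_inventory(self, generation: int, inventory: dict) -> int:
        if generation != self.generation:
            raise ConflictError("placement.concurrent_update")    # the 409 in the log
        self.inventory = inventory
        self.generation += 1                                      # 170 -> 171 on success
        return self.generation

def set_inventory_with_retries(placement: FakePlacement, inventory: dict,
                               cached_generation: int, max_attempts: int = 3) -> int:
    generation = cached_generation
    for _attempt in range(max_attempts):
        try:
            return placement.put_inventory(generation, inventory)
        except ConflictError:
            # Refresh the provider view (generation, inventories, aggregates and
            # traits in the real client) and retry with the new generation.
            generation = placement.generation
    raise RuntimeError("could not update inventory after %d attempts" % max_attempts)

# Example: the client holds a stale generation (169), hits the conflict once,
# refreshes, and succeeds on the retry.
placement = FakePlacement()
new_gen = set_inventory_with_retries(placement, {"VCPU": {"total": 48}}, cached_generation=169)
assert new_gen == 171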
[ 1993.619304] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.619566] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.619763] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.619942] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.620129] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.622221] env[62525]: INFO nova.compute.manager [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Terminating instance [ 1993.623853] env[62525]: DEBUG nova.compute.manager [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1993.624059] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1993.624869] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5d65e2-23df-4cef-9d8c-845a92663b7e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.632371] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1993.632585] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9134642d-0714-4d72-937e-4d494617f06a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.639188] env[62525]: DEBUG oslo_vmware.api [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1993.639188] env[62525]: value = "task-1782382" [ 1993.639188] env[62525]: _type = "Task" [ 1993.639188] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.646639] env[62525]: DEBUG oslo_vmware.api [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782382, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.036150] env[62525]: DEBUG nova.compute.manager [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Found 3 images (rotation: 2) {{(pid=62525) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1994.036356] env[62525]: DEBUG nova.compute.manager [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Rotating out 1 backups {{(pid=62525) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 1994.036519] env[62525]: DEBUG nova.compute.manager [None req-1ec3136f-28d7-4ff6-9601-97651d6810bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleting image 4f6edf16-9b31-4c97-b7c6-52b755e6445a {{(pid=62525) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 1994.149406] env[62525]: DEBUG oslo_vmware.api [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782382, 'name': PowerOffVM_Task, 'duration_secs': 0.280627} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.149678] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1994.149849] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1994.150108] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58a6b0d7-0195-4726-80ce-0ac7ed3ff94b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.265952] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.266200] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.266310] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1995.054727] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 
tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1995.055060] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1995.055161] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleting the datastore file [datastore1] 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1995.055435] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b4fe893-ce47-46ba-b5e2-7f73c93e5d00 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.062063] env[62525]: DEBUG oslo_vmware.api [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for the task: (returnval){ [ 1995.062063] env[62525]: value = "task-1782384" [ 1995.062063] env[62525]: _type = "Task" [ 1995.062063] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.069773] env[62525]: DEBUG oslo_vmware.api [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.300217] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.300418] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.300600] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1995.575640] env[62525]: DEBUG oslo_vmware.api [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Task: {'id': task-1782384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129139} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.575993] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1995.576261] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1995.576583] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1995.576699] env[62525]: INFO nova.compute.manager [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Took 1.95 seconds to destroy the instance on the hypervisor. [ 1995.577368] env[62525]: DEBUG oslo.service.loopingcall [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1995.577368] env[62525]: DEBUG nova.compute.manager [-] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1995.577368] env[62525]: DEBUG nova.network.neutron [-] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1995.867797] env[62525]: DEBUG nova.compute.manager [req-677889e8-3800-424e-9a46-c227099083d2 req-5c26fc61-2d5a-410b-acdd-b9a107566e08 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Received event network-vif-deleted-52d67f7f-d861-4c0b-bfa2-c2f41085fb6f {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1995.868011] env[62525]: INFO nova.compute.manager [req-677889e8-3800-424e-9a46-c227099083d2 req-5c26fc61-2d5a-410b-acdd-b9a107566e08 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Neutron deleted interface 52d67f7f-d861-4c0b-bfa2-c2f41085fb6f; detaching it from the instance and deleting it from the info cache [ 1995.868194] env[62525]: DEBUG nova.network.neutron [req-677889e8-3800-424e-9a46-c227099083d2 req-5c26fc61-2d5a-410b-acdd-b9a107566e08 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.070321] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "870d7795-49ca-4201-983a-a85b590e805e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1996.070704] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1996.070762] env[62525]: DEBUG nova.compute.manager [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1996.071645] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8352487a-4eb0-478a-86d5-da652b0e6f31 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.078596] env[62525]: DEBUG nova.compute.manager [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62525) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3369}} [ 1996.079152] env[62525]: DEBUG nova.objects.instance [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'flavor' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1996.341421] env[62525]: DEBUG nova.network.neutron [-] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.370351] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78213a8b-a321-4f42-9367-482e61eda6ba {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.380690] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31beb0f-fbed-4a32-87c6-833c0a73fa81 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.407665] env[62525]: DEBUG nova.compute.manager [req-677889e8-3800-424e-9a46-c227099083d2 req-5c26fc61-2d5a-410b-acdd-b9a107566e08 service nova] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Detach interface failed, port_id=52d67f7f-d861-4c0b-bfa2-c2f41085fb6f, reason: Instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1996.532544] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [{"id": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "address": "fa:16:3e:61:96:1d", "network": {"id": "677d00e2-f75e-4ce8-8129-2852bf1070d6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-705563966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "209b99adb38b4c8b9e5a277019dbe292", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbceaa7b6-06", "ovs_interfaceid": "bceaa7b6-06fc-45f3-be4d-d376a854cc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.583788] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powering off the VM {{(pid=62525) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1996.584092] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a368af5-c4a3-409c-b1f0-6ae5097b9079 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.591357] env[62525]: DEBUG oslo_vmware.api [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 1996.591357] env[62525]: value = "task-1782385" [ 1996.591357] env[62525]: _type = "Task" [ 1996.591357] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.599263] env[62525]: DEBUG oslo_vmware.api [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782385, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.844828] env[62525]: INFO nova.compute.manager [-] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Took 1.27 seconds to deallocate network for instance. [ 1997.034952] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-0e18bcbf-1b77-46cb-99d4-62668b9c8a55" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.035132] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1997.035305] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1997.035458] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1997.035599] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1997.035743] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1997.035915] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1997.036121] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1997.036265] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1997.036415] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1997.100613] env[62525]: DEBUG oslo_vmware.api [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782385, 'name': PowerOffVM_Task, 'duration_secs': 0.211374} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.100943] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1997.101059] env[62525]: DEBUG nova.compute.manager [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1997.101724] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd197f1-ec45-47d2-8ee7-9f0c4618e525 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.351622] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.351928] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.352181] env[62525]: DEBUG nova.objects.instance [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lazy-loading 'resources' on Instance uuid 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1997.539200] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.612279] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3b061499-f05a-4406-aad5-e10c65b2ef2c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.542s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.906836] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bfe221-b230-4f19-8692-1bd6e35d852a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.914933] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357b68ee-c26c-4950-a35b-e829040a0553 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.945609] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525acd26-e6de-445d-8f2b-b3151d2aed6a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.953148] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5991ae-724f-421d-a1d6-5afda9424a76 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.967876] env[62525]: DEBUG nova.compute.provider_tree [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1998.471595] env[62525]: DEBUG nova.scheduler.client.report [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1998.976489] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.978709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.440s {{(pid=62525) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.978891] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.979422] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1998.980373] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eefac2f-c125-4924-a7ba-da3a316e9c56 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.989110] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ad8c8f-0cdf-44a5-bb34-c4b8069d810a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.994014] env[62525]: INFO nova.scheduler.client.report [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Deleted allocations for instance 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394 [ 1999.005971] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688b2ade-e4db-462b-8fdc-f183d3100ea8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.012598] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b99275c-4a25-4f5c-aae3-da4fbccabe30 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.041803] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180296MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1999.041956] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.042155] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.274702] env[62525]: DEBUG nova.compute.manager [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Stashing vm_state: stopped {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 
1999.512098] env[62525]: DEBUG oslo_concurrency.lockutils [None req-ad9e6642-158f-4591-82f2-e753a9e71d14 tempest-ServerActionsTestOtherA-2010789965 tempest-ServerActionsTestOtherA-2010789965-project-member] Lock "8d8b8a9e-c9ad-42d3-8a71-9f6e62206394" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.892s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.793872] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.049863] env[62525]: INFO nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating resource usage from migration e51c58fb-1b55-4171-900a-4033d7ceb081 [ 2000.067017] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2000.067200] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Migration e51c58fb-1b55-4171-900a-4033d7ceb081 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2000.067327] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 870d7795-49ca-4201-983a-a85b590e805e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2000.067500] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2000.067638] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2000.110511] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af22d98-72d6-42cd-b36a-10784ee035e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.118570] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55252757-0964-48ac-b089-d8961527a1bc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.148380] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a93bebe-913d-46a6-98cf-12285d455332 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.155487] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb613e4c-266e-4bc8-9e23-df79df73d416 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.168495] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2000.675019] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2001.179271] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2001.179489] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.137s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.179772] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.386s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.181520] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.181609] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2001.686372] env[62525]: INFO nova.compute.claims [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2001.701764] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] There are 50 instances to clean {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2001.701939] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: ed5234ba-d26b-47da-8c9b-4cc591baf087] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2002.206029] env[62525]: INFO nova.compute.resource_tracker [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating resource usage from migration e51c58fb-1b55-4171-900a-4033d7ceb081 [ 2002.209096] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: abd538d5-f433-4896-9871-5cdef303cda0] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2002.263345] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f8b01b-1106-4198-aae2-75ba7c0c21c4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.270879] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b48ca4-e86b-4196-be1e-c39fc650912e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.300405] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a457e1-54ea-4872-8dab-a84628010fb8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.307191] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0463531-6dd5-4837-abb6-0768bc8650ea {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.320322] env[62525]: DEBUG nova.compute.provider_tree [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 
tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2002.712040] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c008fa1d-7cd6-4f8c-9459-4a47f342eeaf] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2002.823164] env[62525]: DEBUG nova.scheduler.client.report [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2003.215048] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c802b4f6-f34b-4d40-9bba-1b6d56643b8c] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2003.328411] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.148s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.329026] env[62525]: INFO nova.compute.manager [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Migrating [ 2003.718283] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 5bd77e82-1b44-40b7-ad08-ff1d7d9e5b86] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2003.844302] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.844487] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.844650] env[62525]: DEBUG nova.network.neutron [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] 
[instance: 870d7795-49ca-4201-983a-a85b590e805e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2004.220955] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: fd078815-58e6-4a3a-9da8-dd5324ea76b8] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2004.560071] env[62525]: DEBUG nova.network.neutron [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.724663] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 52341e2f-b556-4f84-b60e-16a3e71df504] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2005.062482] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.227888] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 13020656-4e4f-40ee-a77a-fd64ae340e09] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2005.731602] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c4e31de8-0b94-4fea-aa30-8af5608d257a] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2006.235827] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 7c177d17-d0fe-4df6-900d-e1a6118bc79e] Instance has had 0 of 5 cleanup attempts 
{{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2006.578899] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cfdd03-8279-4b9a-a6b2-310f9b1f5d77 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.599759] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance '870d7795-49ca-4201-983a-a85b590e805e' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2006.738361] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 154ac489-69e4-41a8-90cf-b3d6196c4822] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2007.105985] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2007.106314] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d24b8f7a-5dbf-4c64-963f-0a2f8f96b7fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.114956] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2007.114956] env[62525]: value = "task-1782386" [ 2007.114956] env[62525]: _type = "Task" [ 2007.114956] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.124575] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2007.124785] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance '870d7795-49ca-4201-983a-a85b590e805e' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2007.242043] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 42d14e44-44d6-46de-84e3-049a2d7e84f3] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2007.632055] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2007.632055] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2007.632350] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2007.632350] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2007.632452] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2007.632600] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2007.632801] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2007.632959] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2007.633162] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2007.633334] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2007.633556] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2007.638696] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f5fe0c1-9f52-4b46-8b38-98993cdbab3b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.654328] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2007.654328] env[62525]: value = "task-1782387" [ 2007.654328] env[62525]: _type = "Task" [ 2007.654328] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.662371] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782387, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.745579] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: b7768ee1-16f7-40f0-9f5f-28df4a1580f2] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2008.164890] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782387, 'name': ReconfigVM_Task, 'duration_secs': 0.161556} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.165205] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance '870d7795-49ca-4201-983a-a85b590e805e' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2008.249378] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 61fa8887-db88-4adc-8c3f-ffc78e0e550d] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2008.672111] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2008.672338] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2008.672496] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2008.672717] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2008.672873] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 
{{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2008.673034] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2008.673244] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2008.673402] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2008.673568] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2008.673728] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2008.673898] env[62525]: DEBUG nova.virt.hardware [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2008.679146] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2008.679448] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c03b0a11-60e2-4edf-91ef-707d781b36cf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.698942] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2008.698942] env[62525]: value = "task-1782388" [ 2008.698942] env[62525]: _type = "Task" [ 2008.698942] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.706483] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782388, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.753040] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 69a1093a-95d7-4cbb-90bf-1a213470872a] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2009.210984] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782388, 'name': ReconfigVM_Task, 'duration_secs': 0.215755} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.211288] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2009.212124] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcef3076-a570-41ae-8ada-13db270ff199 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.234055] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 870d7795-49ca-4201-983a-a85b590e805e/870d7795-49ca-4201-983a-a85b590e805e.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2009.234343] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee7fa400-e225-474e-9d2b-e03035741eae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.251652] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2009.251652] env[62525]: value = "task-1782389" [ 2009.251652] env[62525]: _type = "Task" [ 2009.251652] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.256065] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 467c6af1-2961-4213-8f0c-fe7591d93b5d] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2009.260847] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.761339] env[62525]: DEBUG oslo_vmware.api [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782389, 'name': ReconfigVM_Task, 'duration_secs': 0.259248} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.761726] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c14f3fb8-3090-4df3-9e78-57ee9d62921f] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2009.763508] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 870d7795-49ca-4201-983a-a85b590e805e/870d7795-49ca-4201-983a-a85b590e805e.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2009.763796] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance '870d7795-49ca-4201-983a-a85b590e805e' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2010.268597] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 9fcec068-4921-4a42-b948-6e61a44658ce] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2010.272589] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681de697-5563-4942-bab0-ae3ec83774d2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.292197] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f9d56b-84e6-4ffa-a667-7e9956e715c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.309662] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance '870d7795-49ca-4201-983a-a85b590e805e' progress to 67 
{{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2010.772625] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: e1a2983e-28cf-4e3e-9bb9-117b1b6b9ed0] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2010.848675] env[62525]: DEBUG nova.network.neutron [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Port 10f619d4-6192-4474-84e8-35cecf4327f7 binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2011.276086] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 160a67ea-5044-4597-9a61-82e05b8aa778] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2011.779294] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 5a40ca03-f61c-4232-80dc-7a745a34bc67] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2011.872573] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "870d7795-49ca-4201-983a-a85b590e805e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.872809] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.872958] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.283288] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 4822fcae-9ffa-40fb-9870-2359cdd6b04d] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2012.786412] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 0f401a95-7b62-4940-a819-d0d69fc4a59a] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2012.905587] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.905805] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.905984] env[62525]: DEBUG nova.network.neutron [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2013.289333] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 50ee564d-7b27-4bc4-a95e-7717de865cfb] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2013.617726] env[62525]: DEBUG nova.network.neutron [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.793008] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: b0d6acae-8da3-4ed9-8832-b1e88338ed27] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2014.121084] env[62525]: DEBUG oslo_concurrency.lockutils [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.296333] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6624506c-56ad-41f4-8d90-ed34ccfb9385] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2014.646121] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c83b42a-55cc-46ef-ab49-b6f5fd3350db {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.666985] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6319b7-1ce7-4c7e-899f-c5b11243e7b6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.673874] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance '870d7795-49ca-4201-983a-a85b590e805e' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2014.799169] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 130a3015-6caf-4374-a35f-9dd49bb8b3bf] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2015.180593] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-57c23d95-55a8-49d8-9a5f-70737a7adf05 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance '870d7795-49ca-4201-983a-a85b590e805e' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2015.302302] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c2baf40b-ea57-4552-8d56-45bcd49280ec] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2015.805360] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: ad4e94cc-d59c-4876-bf66-ec084350f875] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2016.308894] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8abf0305-2000-4ffe-aa88-e2b355383ea3] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2016.812161] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 3b1a825f-b6a5-4822-86a5-57972f34748c] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2017.315134] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 4278fbb1-d2bd-4e92-aaca-260d40aa26b1] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2017.482265] env[62525]: DEBUG oslo_concurrency.lockutils [None 
req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "870d7795-49ca-4201-983a-a85b590e805e" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.482535] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.482707] env[62525]: DEBUG nova.compute.manager [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Going to confirm migration 9 {{(pid=62525) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2017.818530] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c75091c3-45d2-4c71-b2ad-d38e8a449624] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2018.064496] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.064778] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.064973] env[62525]: DEBUG nova.network.neutron [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2018.065176] env[62525]: DEBUG nova.objects.instance [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'info_cache' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2018.321597] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c96a9ef9-0ef7-41a2-bb0f-531f82980eb8] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2018.825341] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 73156235-1b13-4fda-8957-ed8cd88ceb43] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 
2019.046408] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.046743] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.047030] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.047240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.047412] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.049500] env[62525]: INFO nova.compute.manager [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Terminating instance [ 2019.051231] env[62525]: DEBUG nova.compute.manager [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Start destroying the instance on the hypervisor. 
{{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2019.051422] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2019.052304] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be638f7-7c2a-4cbe-b559-0645fad56612 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.060691] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2019.060904] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54675b50-23d6-4737-b78f-2e35480581b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.066886] env[62525]: DEBUG oslo_vmware.api [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 2019.066886] env[62525]: value = "task-1782390" [ 2019.066886] env[62525]: _type = "Task" [ 2019.066886] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.074377] env[62525]: DEBUG oslo_vmware.api [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782390, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.286279] env[62525]: DEBUG nova.network.neutron [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.329080] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 5b1a2a46-df4d-41c6-a750-9ec3c75e57f6] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2019.577085] env[62525]: DEBUG oslo_vmware.api [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782390, 'name': PowerOffVM_Task, 'duration_secs': 0.179011} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.577342] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2019.577511] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2019.577844] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27eb7aa9-1ead-4500-92e3-4297637daddf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.655241] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2019.655414] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2019.655565] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleting the datastore file [datastore1] 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2019.655816] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-958591c5-c1c3-4418-8552-eee18cec2943 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.661588] env[62525]: DEBUG oslo_vmware.api [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for the task: (returnval){ [ 2019.661588] env[62525]: value = "task-1782392" [ 2019.661588] env[62525]: _type = "Task" [ 2019.661588] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.670211] env[62525]: DEBUG oslo_vmware.api [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782392, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.789572] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.789860] env[62525]: DEBUG nova.objects.instance [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'migration_context' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2019.830938] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 462bc19d-1eaa-4c57-8ebb-412a97614f03] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2020.171691] env[62525]: DEBUG oslo_vmware.api [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Task: {'id': task-1782392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224605} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.171971] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2020.172183] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2020.172358] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2020.172531] env[62525]: INFO nova.compute.manager [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2020.172773] env[62525]: DEBUG oslo.service.loopingcall [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2020.172966] env[62525]: DEBUG nova.compute.manager [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2020.173087] env[62525]: DEBUG nova.network.neutron [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2020.292622] env[62525]: DEBUG nova.objects.base [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Object Instance<870d7795-49ca-4201-983a-a85b590e805e> lazy-loaded attributes: info_cache,migration_context {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2020.293568] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75556f52-1076-4d34-bc60-313d5cd67826 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.312637] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-280ad8df-7160-4eba-b5c9-8c30f129034b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.318365] env[62525]: DEBUG oslo_vmware.api [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2020.318365] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]527fc3d5-2b6f-3190-328b-4caeba024d12" [ 2020.318365] env[62525]: _type = "Task" [ 2020.318365] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.325955] env[62525]: DEBUG oslo_vmware.api [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527fc3d5-2b6f-3190-328b-4caeba024d12, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.335010] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 80fbfbda-07fb-43ab-be74-3cbdaf890a55] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2020.613035] env[62525]: DEBUG nova.compute.manager [req-38bf794b-6add-4fb3-ae6a-945bb1cf884f req-d2e4523d-1f1f-41ad-9483-22d8c1251466 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Received event network-vif-deleted-bceaa7b6-06fc-45f3-be4d-d376a854cc39 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2020.613266] env[62525]: INFO nova.compute.manager [req-38bf794b-6add-4fb3-ae6a-945bb1cf884f req-d2e4523d-1f1f-41ad-9483-22d8c1251466 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Neutron deleted interface bceaa7b6-06fc-45f3-be4d-d376a854cc39; detaching it from the instance and deleting it from the info cache [ 2020.613441] env[62525]: DEBUG nova.network.neutron [req-38bf794b-6add-4fb3-ae6a-945bb1cf884f req-d2e4523d-1f1f-41ad-9483-22d8c1251466 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.828802] env[62525]: DEBUG oslo_vmware.api [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]527fc3d5-2b6f-3190-328b-4caeba024d12, 'name': SearchDatastore_Task, 'duration_secs': 0.006733} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.829116] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.829343] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.836192] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: c43b4ce0-5aaf-4e73-a2d8-bf6b5d5ce3a4] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2021.089190] env[62525]: DEBUG nova.network.neutron [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.115609] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9614d4f-906e-4f79-aa35-c6edaf6bd317 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.125487] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e50a97f-a514-4143-aa28-7325e455519d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.150515] env[62525]: DEBUG nova.compute.manager [req-38bf794b-6add-4fb3-ae6a-945bb1cf884f req-d2e4523d-1f1f-41ad-9483-22d8c1251466 service nova] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Detach interface failed, port_id=bceaa7b6-06fc-45f3-be4d-d376a854cc39, reason: Instance 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 could not be found. 
{{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2021.338896] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: cb043ab8-dff7-48c6-b50b-a4d77a01eb41] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2021.386052] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550e3d2f-48c2-490f-8af0-5edc47ff2609 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.393759] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5459806-0625-4fe5-a4cb-39a9b7ec77ac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.423721] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5ae489-430b-49de-b80d-31ed1bbc3d8b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.431048] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5e3712-80ed-4415-9a6a-ebad1e9a3cac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.444834] env[62525]: DEBUG nova.compute.provider_tree [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2021.592037] env[62525]: INFO nova.compute.manager [-] [instance: 0e18bcbf-1b77-46cb-99d4-62668b9c8a55] Took 1.42 seconds to deallocate network for instance. 
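The records above (task-1782390 through task-1782392) trace the VMware-driver side of terminate_instance for instance 0e18bcbf-1b77-46cb-99d4-62668b9c8a55: power off via PowerOffVM_Task, UnregisterVM, delete the instance directory with DeleteDatastoreFile_Task, then deallocate the Neutron port. Each vCenter task is polled until it reports completion, which is what the recurring "progress is 0%" / "completed successfully" pairs show. A minimal sketch of that poll-until-complete pattern follows; the helper name, polling interval and get_task_state callback are illustrative assumptions, not oslo.vmware's actual wait_for_task implementation.

    # Hypothetical poll-until-complete loop mirroring the task-polling records above.
    # get_task_state is an assumed callback returning (state, progress), e.g.
    # ('running', 0) -> ('success', 100); it stands in for a vCenter task query.
    import time

    def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_task_state()
            if state == 'success':
                return                     # corresponds to "completed successfully"
            if state == 'error':
                raise RuntimeError('vCenter task failed')
            time.sleep(poll_interval)      # corresponds to the repeated "progress is N%" lines
        raise TimeoutError('vCenter task did not complete within %.0fs' % timeout)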
[ 2021.842731] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 82443424-6071-44b3-bd9a-f92a1a650f27] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2021.947911] env[62525]: DEBUG nova.scheduler.client.report [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2022.098064] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.346411] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: e8586018-100e-4729-97fc-98effa87cd9e] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2022.851096] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 2f713b35-9d07-4d25-a333-506fd2469bd5] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2022.957211] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.128s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.957429] env[62525]: DEBUG nova.compute.manager [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62525) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 2022.960204] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.862s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.960422] env[62525]: DEBUG nova.objects.instance [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lazy-loading 'resources' on Instance uuid 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2023.354282] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 54d1a1ed-0880-4cca-8759-585dc65bdb1a] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2023.506343] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5f77a6-df51-42f4-879c-42d9ee117de8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.510535] env[62525]: INFO nova.scheduler.client.report [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted allocation for migration e51c58fb-1b55-4171-900a-4033d7ceb081 [ 2023.517526] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6f992e-6a58-4502-bd47-17e90c1f79e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.547883] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b34e6c6-5514-43d9-ae3c-ac89373ce7ca {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.555516] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b436c98-4a7b-401b-831c-e7cb0356a2d9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.569857] env[62525]: DEBUG nova.compute.provider_tree [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2023.857400] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 8d8b8a9e-c9ad-42d3-8a71-9f6e62206394] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2024.016659] env[62525]: DEBUG oslo_concurrency.lockutils [None req-93e7bca0-5700-4194-80d4-2ed9b8a43398 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: 
held 6.534s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.073228] env[62525]: DEBUG nova.scheduler.client.report [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2024.331599] env[62525]: DEBUG nova.objects.instance [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'flavor' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2024.361651] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 0067de08-6708-4c7c-a83a-ed9df193d5cd] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2024.578248] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.595107] env[62525]: INFO nova.scheduler.client.report [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Deleted allocations for instance 0e18bcbf-1b77-46cb-99d4-62668b9c8a55 [ 2024.837096] env[62525]: DEBUG oslo_concurrency.lockutils [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2024.837305] env[62525]: DEBUG oslo_concurrency.lockutils [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2024.837445] env[62525]: DEBUG nova.network.neutron [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2024.837687] env[62525]: DEBUG nova.objects.instance [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'info_cache' on Instance uuid 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2024.865063] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 94560d78-071c-419d-ad10-f42a5b2271a8] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2025.102152] env[62525]: DEBUG oslo_concurrency.lockutils [None req-6349f3b2-6176-4938-99fb-aa5ce4c0703c tempest-AttachVolumeShelveTestJSON-1969760395 tempest-AttachVolumeShelveTestJSON-1969760395-project-member] Lock "0e18bcbf-1b77-46cb-99d4-62668b9c8a55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.055s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.341310] env[62525]: DEBUG nova.objects.base [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Object Instance<870d7795-49ca-4201-983a-a85b590e805e> lazy-loaded attributes: flavor,info_cache {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2025.368011] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 2f589dc1-9244-475f-86d0-4b69b511508b] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2025.870920] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: 6e9051e9-aa89-408f-8f62-533085dc1312] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.227373] env[62525]: DEBUG nova.network.neutron [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [{"id": "10f619d4-6192-4474-84e8-35cecf4327f7", "address": "fa:16:3e:d5:48:93", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10f619d4-61", "ovs_interfaceid": "10f619d4-6192-4474-84e8-35cecf4327f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2026.374266] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: fdc446c9-d6dc-4d31-bcf2-59b43a0e91f3] Instance has had 0 of 5 cleanup attempts {{(pid=62525) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.730659] env[62525]: DEBUG oslo_concurrency.lockutils [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-870d7795-49ca-4201-983a-a85b590e805e" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2026.877625] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.877991] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Cleaning up deleted instances with incomplete migration {{(pid=62525) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2027.234655] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2027.234655] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-400913fb-eb3f-48b3-baf5-954c2fa63798 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.242865] env[62525]: DEBUG oslo_vmware.api [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2027.242865] env[62525]: value = "task-1782394" [ 2027.242865] env[62525]: _type = "Task" [ 2027.242865] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.251050] env[62525]: DEBUG oslo_vmware.api [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782394, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.380601] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.753052] env[62525]: DEBUG oslo_vmware.api [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782394, 'name': PowerOnVM_Task, 'duration_secs': 0.361492} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.753324] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2027.753538] env[62525]: DEBUG nova.compute.manager [None req-98cc16d3-11ca-4b15-8958-ddf222e7245c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2027.754280] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617f38a4-9906-429f-9711-648299bed695 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.922709] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "870d7795-49ca-4201-983a-a85b590e805e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.923149] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.923231] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "870d7795-49ca-4201-983a-a85b590e805e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.923443] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.923627] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.925814] env[62525]: INFO nova.compute.manager [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Terminating instance [ 2028.927621] env[62525]: DEBUG nova.compute.manager [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2028.927816] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2028.928710] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921d6c2a-19cb-413f-b4dd-b3f181e6ef65 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.936719] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2028.936930] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2e86a3f-1a0a-44b6-96bb-c495ea239663 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.945151] env[62525]: DEBUG oslo_vmware.api [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2028.945151] env[62525]: value = "task-1782395" [ 2028.945151] env[62525]: _type = "Task" [ 2028.945151] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.953028] env[62525]: DEBUG oslo_vmware.api [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782395, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.454450] env[62525]: DEBUG oslo_vmware.api [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782395, 'name': PowerOffVM_Task, 'duration_secs': 0.197202} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.454701] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2029.454867] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2029.455152] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9625530-05e0-4b2e-ac40-1135377f3361 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.628287] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2029.628483] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2029.628701] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleting the datastore file [datastore1] 870d7795-49ca-4201-983a-a85b590e805e {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2029.628947] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6588640e-fc1e-4e2e-b400-2bd4972e5413 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.635685] env[62525]: DEBUG oslo_vmware.api [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2029.635685] env[62525]: value = "task-1782397" [ 2029.635685] env[62525]: _type = "Task" [ 2029.635685] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.643076] env[62525]: DEBUG oslo_vmware.api [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782397, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.148802] env[62525]: DEBUG oslo_vmware.api [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782397, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14439} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.148802] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2030.148802] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2030.148802] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2030.148802] env[62525]: INFO nova.compute.manager [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 2030.148802] env[62525]: DEBUG oslo.service.loopingcall [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2030.148802] env[62525]: DEBUG nova.compute.manager [-] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2030.148802] env[62525]: DEBUG nova.network.neutron [-] [instance: 870d7795-49ca-4201-983a-a85b590e805e] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2030.586608] env[62525]: DEBUG nova.compute.manager [req-e5ccb4e1-4575-456a-9fd2-26c0bf881e96 req-1d1709a8-a0e9-4a50-9d9c-d4dcfb40aa7b service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Received event network-vif-deleted-10f619d4-6192-4474-84e8-35cecf4327f7 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2030.586673] env[62525]: INFO nova.compute.manager [req-e5ccb4e1-4575-456a-9fd2-26c0bf881e96 req-1d1709a8-a0e9-4a50-9d9c-d4dcfb40aa7b service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Neutron deleted interface 10f619d4-6192-4474-84e8-35cecf4327f7; detaching it from the instance and deleting it from the info cache [ 2030.586862] env[62525]: DEBUG nova.network.neutron [req-e5ccb4e1-4575-456a-9fd2-26c0bf881e96 req-1d1709a8-a0e9-4a50-9d9c-d4dcfb40aa7b service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.053530] env[62525]: DEBUG nova.network.neutron [-] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.089246] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-941e0dbe-939a-4652-947b-4a58d7cb4c72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.099362] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c06281-f430-492f-b31a-a242f4e7ce17 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.124846] env[62525]: DEBUG nova.compute.manager [req-e5ccb4e1-4575-456a-9fd2-26c0bf881e96 req-1d1709a8-a0e9-4a50-9d9c-d4dcfb40aa7b service nova] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Detach interface failed, port_id=10f619d4-6192-4474-84e8-35cecf4327f7, reason: Instance 870d7795-49ca-4201-983a-a85b590e805e could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2031.557540] env[62525]: INFO nova.compute.manager [-] [instance: 870d7795-49ca-4201-983a-a85b590e805e] Took 1.41 seconds to deallocate network for instance. 
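The destroy sequence above is summarized by two "Took N seconds" milestones (1.22 s to destroy the instance on the hypervisor, 1.41 s to deallocate its network). A minimal, illustrative Python sketch for pulling such milestones and their wall-clock offsets out of raw entries like these; the record-splitting regexes and the `milestones()` helper are assumptions for log analysis, not part of Nova or oslo:

```python
import re

# Each record starts with a bracketed offset like "[ 2030.148802] env[62525]:".
RECORD_START = re.compile(r"\[\s*(\d+\.\d+)\] env\[\d+\]:")
MILESTONE = re.compile(r"Took (\d+\.\d+) seconds to ([^.{]+)")

def milestones(log_text: str):
    """Yield (offset_s, duration_s, description) for 'Took N seconds ...' records."""
    starts = list(RECORD_START.finditer(log_text))
    for i, m in enumerate(starts):
        end = starts[i + 1].start() if i + 1 < len(starts) else len(log_text)
        record = log_text[m.end():end]
        hit = MILESTONE.search(record)
        if hit:
            yield float(m.group(1)), float(hit.group(1)), hit.group(2).strip()

# Against the entries above this would yield, for example:
#   (2030.148802, 1.22, 'destroy the instance on the hypervisor')
#   (2031.557540, 1.41, 'deallocate network for instance')
```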
[ 2032.063867] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.064180] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.064377] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.089744] env[62525]: INFO nova.scheduler.client.report [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted allocations for instance 870d7795-49ca-4201-983a-a85b590e805e [ 2032.598085] env[62525]: DEBUG oslo_concurrency.lockutils [None req-e2933b79-3960-441f-89b1-a1758d9df383 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "870d7795-49ca-4201-983a-a85b590e805e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.675s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.812864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "2fddc809-c89c-47d7-b556-c94dfa46621c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.813170] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.315988] env[62525]: DEBUG nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2034.947663] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.947967] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.949775] env[62525]: INFO nova.compute.claims [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2035.984188] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfe84c7-fea3-4f66-85e5-131ce565dd46 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.991767] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8abd99e-901a-4636-baf7-e3a8fe2b8a19 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.020854] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdabd83-ec2c-4aff-900a-f82739db6d9c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.028008] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a178fd51-41c9-460a-80f4-ccf6838dd043 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.040586] env[62525]: DEBUG nova.compute.provider_tree [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2036.543475] env[62525]: DEBUG nova.scheduler.client.report [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2037.049320] env[62525]: DEBUG oslo_concurrency.lockutils 
[None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.049870] env[62525]: DEBUG nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2037.555339] env[62525]: DEBUG nova.compute.utils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2037.557458] env[62525]: DEBUG nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2037.557668] env[62525]: DEBUG nova.network.neutron [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2037.609007] env[62525]: DEBUG nova.policy [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e20c4d99e0b4e08a3b92f274ca94354', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6dbd20742b0f42d5ac04268223bfe911', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 2037.871249] env[62525]: DEBUG nova.network.neutron [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Successfully created port: 99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2038.060682] env[62525]: DEBUG nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2039.070368] env[62525]: DEBUG nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2039.096653] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2039.096893] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2039.097064] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2039.097253] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2039.097432] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2039.097605] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2039.097810] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2039.097970] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2039.098149] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2039.098309] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2039.098508] env[62525]: DEBUG nova.virt.hardware [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2039.099372] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415113ed-3c14-409a-b479-3e5d36d45f9d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.106840] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9a7af5-07d4-402b-b4ac-7caa008b0c11 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.237527] env[62525]: DEBUG nova.compute.manager [req-f158ea26-efde-4c5f-abc5-6470f3df520f req-3b23a3bf-16c5-4837-9914-585ef0db9714 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Received event network-vif-plugged-99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2039.237778] env[62525]: DEBUG oslo_concurrency.lockutils [req-f158ea26-efde-4c5f-abc5-6470f3df520f req-3b23a3bf-16c5-4837-9914-585ef0db9714 service nova] Acquiring lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.237930] env[62525]: DEBUG oslo_concurrency.lockutils [req-f158ea26-efde-4c5f-abc5-6470f3df520f req-3b23a3bf-16c5-4837-9914-585ef0db9714 service nova] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.238108] env[62525]: DEBUG oslo_concurrency.lockutils [req-f158ea26-efde-4c5f-abc5-6470f3df520f req-3b23a3bf-16c5-4837-9914-585ef0db9714 service nova] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.238277] env[62525]: 
DEBUG nova.compute.manager [req-f158ea26-efde-4c5f-abc5-6470f3df520f req-3b23a3bf-16c5-4837-9914-585ef0db9714 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] No waiting events found dispatching network-vif-plugged-99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2039.238446] env[62525]: WARNING nova.compute.manager [req-f158ea26-efde-4c5f-abc5-6470f3df520f req-3b23a3bf-16c5-4837-9914-585ef0db9714 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Received unexpected event network-vif-plugged-99600567-0e8c-4662-9833-1b9ae66d1e51 for instance with vm_state building and task_state spawning. [ 2039.317992] env[62525]: DEBUG nova.network.neutron [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Successfully updated port: 99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2039.820864] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.821049] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.821183] env[62525]: DEBUG nova.network.neutron [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2040.351911] env[62525]: DEBUG nova.network.neutron [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2040.471743] env[62525]: DEBUG nova.network.neutron [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.974868] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.975182] env[62525]: DEBUG nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Instance network_info: |[{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2040.975686] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:0b:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e83154-c0d2-4d3d-b95e-3cd5ba336257', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99600567-0e8c-4662-9833-1b9ae66d1e51', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2040.983700] env[62525]: DEBUG oslo.service.loopingcall [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2040.983912] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2040.984150] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-741c2616-0aaf-4ac1-851c-a3fae81d3a96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.003554] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2041.003554] env[62525]: value = "task-1782398" [ 2041.003554] env[62525]: _type = "Task" [ 2041.003554] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.011669] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782398, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.263617] env[62525]: DEBUG nova.compute.manager [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Received event network-changed-99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2041.263824] env[62525]: DEBUG nova.compute.manager [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Refreshing instance network info cache due to event network-changed-99600567-0e8c-4662-9833-1b9ae66d1e51. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2041.264048] env[62525]: DEBUG oslo_concurrency.lockutils [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] Acquiring lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.264194] env[62525]: DEBUG oslo_concurrency.lockutils [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] Acquired lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.264353] env[62525]: DEBUG nova.network.neutron [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Refreshing network info cache for port 99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2041.513365] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782398, 'name': CreateVM_Task, 'duration_secs': 0.324642} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.513680] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2041.514176] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.514347] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.514664] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2041.514910] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0affa8d7-8123-4741-baba-194ab2ba984e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.518988] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2041.518988] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5261c696-12ca-a19d-cc2f-01c31e848269" [ 2041.518988] env[62525]: _type = "Task" [ 2041.518988] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.526352] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5261c696-12ca-a19d-cc2f-01c31e848269, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.963102] env[62525]: DEBUG nova.network.neutron [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updated VIF entry in instance network info cache for port 99600567-0e8c-4662-9833-1b9ae66d1e51. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2041.963479] env[62525]: DEBUG nova.network.neutron [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.029353] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5261c696-12ca-a19d-cc2f-01c31e848269, 'name': SearchDatastore_Task, 'duration_secs': 0.010669} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.029636] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.029869] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2042.030110] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2042.030259] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2042.030440] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2042.030678] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac230875-3cfb-44da-9e29-202b0e8d39f7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.039338] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2042.039505] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2042.040222] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-153db536-80fe-4871-898a-8e591f1e1c38 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.044966] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2042.044966] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52e3d212-e25b-e341-f2d7-dba8266e10d0" [ 2042.044966] env[62525]: _type = "Task" [ 2042.044966] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.052047] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e3d212-e25b-e341-f2d7-dba8266e10d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.466632] env[62525]: DEBUG oslo_concurrency.lockutils [req-0369321e-8f5f-4ea7-b622-cd1f7677ba5c req-2d2afae8-3acf-41b4-8859-73b9a73f5cc7 service nova] Releasing lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.555250] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52e3d212-e25b-e341-f2d7-dba8266e10d0, 'name': SearchDatastore_Task, 'duration_secs': 0.008285} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.556023] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99e3d212-89c9-43c8-9184-042836a3d4cd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.561042] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2042.561042] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b5f5a6-e886-d774-f88f-72924140ec6b" [ 2042.561042] env[62525]: _type = "Task" [ 2042.561042] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.568374] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b5f5a6-e886-d774-f88f-72924140ec6b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.070859] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b5f5a6-e886-d774-f88f-72924140ec6b, 'name': SearchDatastore_Task, 'duration_secs': 0.025497} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.071132] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.071387] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2043.071642] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d1efb49-73de-4f48-9937-8edcf0a0a069 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.077773] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2043.077773] env[62525]: value = "task-1782399" [ 2043.077773] env[62525]: _type = "Task" [ 2043.077773] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.084633] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782399, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.586919] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782399, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480819} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.587317] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2043.587446] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2043.587716] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25d158fa-43a9-40d0-95df-eeae9f1e120a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.594235] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2043.594235] env[62525]: value = "task-1782400" [ 2043.594235] env[62525]: _type = "Task" [ 2043.594235] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.601045] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782400, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.106026] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782400, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067223} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.106026] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2044.106625] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c576f97a-53a9-4e6d-9c90-3dab9b97578c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.127801] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2044.128040] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4e84805-6f38-4d62-9e10-6405fb882e52 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.146908] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2044.146908] env[62525]: value = "task-1782401" [ 2044.146908] env[62525]: _type = "Task" [ 2044.146908] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.154213] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.657234] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.157455] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.657919] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.158382] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.658983] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.160094] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.661294] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.161810] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782401, 'name': ReconfigVM_Task, 'duration_secs': 3.589367} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.162212] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2048.162839] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa307990-f240-4573-9f46-3054b6e69d98 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.169211] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2048.169211] env[62525]: value = "task-1782402" [ 2048.169211] env[62525]: _type = "Task" [ 2048.169211] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.176432] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782402, 'name': Rename_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.679574] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782402, 'name': Rename_Task, 'duration_secs': 0.133849} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.679931] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2048.680163] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-314dd5e3-b400-48af-b2ec-e5a064dc43c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.686765] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2048.686765] env[62525]: value = "task-1782403" [ 2048.686765] env[62525]: _type = "Task" [ 2048.686765] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.694156] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.197173] env[62525]: DEBUG oslo_vmware.api [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782403, 'name': PowerOnVM_Task, 'duration_secs': 0.420786} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.197493] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2049.197644] env[62525]: INFO nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Took 10.13 seconds to spawn the instance on the hypervisor. 
[ 2049.197854] env[62525]: DEBUG nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2049.198630] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1122de2b-1007-469d-9776-43cd414b2b41 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.716732] env[62525]: INFO nova.compute.manager [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Took 14.90 seconds to build instance. [ 2050.090431] env[62525]: DEBUG nova.compute.manager [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Received event network-changed-99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2050.090629] env[62525]: DEBUG nova.compute.manager [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Refreshing instance network info cache due to event network-changed-99600567-0e8c-4662-9833-1b9ae66d1e51. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2050.090848] env[62525]: DEBUG oslo_concurrency.lockutils [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] Acquiring lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2050.090994] env[62525]: DEBUG oslo_concurrency.lockutils [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] Acquired lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2050.091168] env[62525]: DEBUG nova.network.neutron [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Refreshing network info cache for port 99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2050.218543] env[62525]: DEBUG oslo_concurrency.lockutils [None req-f3a93c9d-bc3e-46a2-a303-3ae833584621 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.405s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.797436] env[62525]: DEBUG nova.network.neutron [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updated VIF entry in instance network info cache for port 99600567-0e8c-4662-9833-1b9ae66d1e51. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2050.797806] env[62525]: DEBUG nova.network.neutron [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2051.301217] env[62525]: DEBUG oslo_concurrency.lockutils [req-94764dc2-2826-4602-8846-682e9c185245 req-81ccb97d-11e2-4057-a46d-21c943851dec service nova] Releasing lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2085.907062] env[62525]: DEBUG oslo_concurrency.lockutils [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "2fddc809-c89c-47d7-b556-c94dfa46621c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.907062] env[62525]: DEBUG oslo_concurrency.lockutils [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.411219] env[62525]: DEBUG nova.compute.utils [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2086.914611] env[62525]: DEBUG oslo_concurrency.lockutils [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.883787] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.884037] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.973793] env[62525]: DEBUG oslo_concurrency.lockutils [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "2fddc809-c89c-47d7-b556-c94dfa46621c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.974229] env[62525]: DEBUG oslo_concurrency.lockutils [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.974431] env[62525]: INFO nova.compute.manager [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Attaching volume 2df595b5-c605-4aa5-a54c-51e8906f0390 to /dev/sdb [ 2088.003942] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec044c3-7191-4ea5-ad67-ccfc43b7d5b9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.012913] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4e363c-6909-44f5-a402-573b60df1441 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.025822] env[62525]: DEBUG nova.virt.block_device [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating existing volume attachment record: 46ba5e29-de1c-4f69-a11d-0bddda3b07f2 {{(pid=62525) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2088.388237] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.388411] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2089.896954] env[62525]: DEBUG 
nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2089.897384] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.897432] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.897576] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.897723] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.897862] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.898019] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.898217] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2089.898381] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.401984] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.402256] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.402421] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.402583] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2090.403505] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e842751e-00b0-446f-a32d-31c9e1cf1828 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.411717] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8802b3c5-afbb-4ea2-ae5e-1bc11a9d2aec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.425934] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78523e61-bd0a-4f2b-b253-b062783953dd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.432113] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69134d5c-f698-4aaa-b11c-5ef0693279c6 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.462317] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181210MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2090.462460] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2090.462646] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.621889] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance 2fddc809-c89c-47d7-b556-c94dfa46621c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2091.622164] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2091.622253] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2091.646204] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fceef90-9257-4fe5-a4f9-6b530d5511e5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.653234] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8905f0b0-e94c-4cb4-9824-9d12ab7d0f5f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.682236] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe451997-baa5-478a-8d22-6c9d53a08330 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.688875] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c634dcb-c8e8-45d9-9e82-423cd8197b7a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.701246] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2092.204759] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2092.710137] env[62525]: DEBUG nova.compute.resource_tracker [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2092.710467] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.248s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.068749] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Volume attach. Driver type: vmdk {{(pid=62525) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2093.068996] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369885', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'name': 'volume-2df595b5-c605-4aa5-a54c-51e8906f0390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2fddc809-c89c-47d7-b556-c94dfa46621c', 'attached_at': '', 'detached_at': '', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'serial': '2df595b5-c605-4aa5-a54c-51e8906f0390'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2093.069944] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e2af1b-f457-4215-9bfe-a34d1731e83b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.087055] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761aa4af-cdb5-4e36-8f02-b15bce2bc839 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.110598] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-2df595b5-c605-4aa5-a54c-51e8906f0390/volume-2df595b5-c605-4aa5-a54c-51e8906f0390.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2093.110823] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d70d201-7f23-4d5c-98fc-ce343cf22400 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.128367] env[62525]: DEBUG oslo_vmware.api [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2093.128367] env[62525]: value = "task-1782408" [ 2093.128367] env[62525]: _type = "Task" [ 2093.128367] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.138515] env[62525]: DEBUG oslo_vmware.api [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782408, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.638658] env[62525]: DEBUG oslo_vmware.api [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782408, 'name': ReconfigVM_Task, 'duration_secs': 0.322918} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.638952] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-2df595b5-c605-4aa5-a54c-51e8906f0390/volume-2df595b5-c605-4aa5-a54c-51e8906f0390.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2093.643433] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df757fe3-6418-4c3d-8d9b-b177897f970a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.658573] env[62525]: DEBUG oslo_vmware.api [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2093.658573] env[62525]: value = "task-1782409" [ 2093.658573] env[62525]: _type = "Task" [ 2093.658573] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.668300] env[62525]: DEBUG oslo_vmware.api [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782409, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.168821] env[62525]: DEBUG oslo_vmware.api [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782409, 'name': ReconfigVM_Task, 'duration_secs': 0.135104} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.169248] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369885', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'name': 'volume-2df595b5-c605-4aa5-a54c-51e8906f0390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2fddc809-c89c-47d7-b556-c94dfa46621c', 'attached_at': '', 'detached_at': '', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'serial': '2df595b5-c605-4aa5-a54c-51e8906f0390'} {{(pid=62525) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2095.202860] env[62525]: DEBUG nova.objects.instance [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'flavor' on Instance uuid 2fddc809-c89c-47d7-b556-c94dfa46621c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2095.708920] env[62525]: DEBUG oslo_concurrency.lockutils [None req-31ede2cf-6b05-4c3c-a940-880a137f61e1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.735s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.171134] env[62525]: DEBUG nova.compute.manager [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Stashing vm_state: active {{(pid=62525) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2097.693013] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.693308] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.199173] env[62525]: INFO nova.compute.claims [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2098.704999] env[62525]: INFO nova.compute.resource_tracker [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating 
resource usage from migration c772edb4-dbd1-41f7-8bb9-44833e5986a5 [ 2098.742308] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b072c133-0af9-464d-aff9-2609da288b4e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.750126] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cf1ccb-b931-45e3-aa2c-95d859052b5e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.782560] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c2cdcc-84a6-462b-9c95-4dea76542bac {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.790011] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec6a198-e1c8-4973-b57c-92ade0de9045 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.802660] env[62525]: DEBUG nova.compute.provider_tree [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2099.305834] env[62525]: DEBUG nova.scheduler.client.report [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2099.811548] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.118s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.811838] env[62525]: INFO nova.compute.manager [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Migrating [ 2100.327388] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2100.327958] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2100.328130] env[62525]: DEBUG nova.network.neutron [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2101.044566] env[62525]: DEBUG nova.network.neutron [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2101.547176] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2103.062198] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f6a600-b6bf-40bc-b023-c293c0d09865 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.083583] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance '2fddc809-c89c-47d7-b556-c94dfa46621c' progress to 0 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2103.589454] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] 
[instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2103.589767] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e54e3600-49c8-4f1f-bc9b-b56a81d8ef3d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.597199] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2103.597199] env[62525]: value = "task-1782410" [ 2103.597199] env[62525]: _type = "Task" [ 2103.597199] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.605264] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782410, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.109192] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782410, 'name': PowerOffVM_Task, 'duration_secs': 0.207545} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.109584] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2104.109628] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance '2fddc809-c89c-47d7-b556-c94dfa46621c' progress to 17 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2104.617028] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2104.617028] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2104.617028] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2104.617028] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2104.617028] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2104.617712] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2104.618126] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2104.618425] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2104.618761] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2104.620978] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2104.620978] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2104.626381] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03461153-03be-447f-b730-559d34b63873 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.643757] env[62525]: DEBUG oslo_vmware.api 
[None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2104.643757] env[62525]: value = "task-1782411" [ 2104.643757] env[62525]: _type = "Task" [ 2104.643757] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.654046] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782411, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.153810] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782411, 'name': ReconfigVM_Task, 'duration_secs': 0.176488} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.154260] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance '2fddc809-c89c-47d7-b556-c94dfa46621c' progress to 33 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2105.660298] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2105.660565] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2105.660707] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2105.660884] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2105.661042] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 
tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2105.661196] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2105.661399] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2105.661551] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2105.661713] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2105.661877] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2105.662215] env[62525]: DEBUG nova.virt.hardware [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2105.667457] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2105.667742] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa012156-e8a4-41f4-96ef-ad56eb5a7720 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.685925] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2105.685925] env[62525]: value = "task-1782412" [ 2105.685925] env[62525]: _type = "Task" [ 2105.685925] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.693600] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782412, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.195844] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782412, 'name': ReconfigVM_Task, 'duration_secs': 0.192137} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.196223] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2106.196867] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead76cf3-8326-4035-aed0-c1be6c91b641 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.221779] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2106.222372] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-502e3bbb-5db7-4e0e-85c1-680a08a7d60e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.240485] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2106.240485] env[62525]: value = "task-1782413" [ 2106.240485] env[62525]: _type = "Task" [ 2106.240485] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.248123] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782413, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.750588] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782413, 'name': ReconfigVM_Task, 'duration_secs': 0.305243} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.752047] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2106.752047] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance '2fddc809-c89c-47d7-b556-c94dfa46621c' progress to 50 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2107.258052] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d97194-97c7-4268-a547-88502c3920ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.280307] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087955d4-2338-407c-b1a3-5e480b567a92 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.299884] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance '2fddc809-c89c-47d7-b556-c94dfa46621c' progress to 67 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2108.934490] env[62525]: DEBUG nova.network.neutron [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Port 99600567-0e8c-4662-9833-1b9ae66d1e51 binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2109.956869] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2109.957283] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2109.957495] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.990707] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2110.990993] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2110.991085] env[62525]: DEBUG nova.network.neutron [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2111.712274] env[62525]: DEBUG nova.network.neutron [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2112.214829] env[62525]: DEBUG oslo_concurrency.lockutils [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2112.723935] env[62525]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75264a8c-e492-444a-a8a9-797bd166adb1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.731176] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d7b1fe-e9f1-4388-9130-0f004151e36e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.823980] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ef11e0-f765-452a-aaa8-21cf86848f56 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.845584] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26829857-0ad4-4084-b7af-301957e87b20 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.851945] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance '2fddc809-c89c-47d7-b556-c94dfa46621c' progress to 83 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2114.358568] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2114.358948] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-036d77db-684e-4cae-8101-b98f32208fec {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.366015] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2114.366015] env[62525]: value = "task-1782414" [ 2114.366015] env[62525]: _type = "Task" [ 2114.366015] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.373572] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782414, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.875682] env[62525]: DEBUG oslo_vmware.api [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782414, 'name': PowerOnVM_Task, 'duration_secs': 0.363125} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.876039] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2114.876239] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-76ddeb04-0b7a-4dd9-bfab-6e25a1532025 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance '2fddc809-c89c-47d7-b556-c94dfa46621c' progress to 100 {{(pid=62525) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2117.843117] env[62525]: DEBUG nova.network.neutron [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Port 99600567-0e8c-4662-9833-1b9ae66d1e51 binding to destination host cpu-1 is already ACTIVE {{(pid=62525) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2117.843442] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.843525] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.843642] env[62525]: DEBUG nova.network.neutron [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2118.555692] env[62525]: DEBUG nova.network.neutron [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.058832] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.563195] env[62525]: DEBUG nova.compute.manager [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62525) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2120.661240] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.661642] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.164846] env[62525]: DEBUG nova.objects.instance [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'migration_context' on Instance uuid 2fddc809-c89c-47d7-b556-c94dfa46621c {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2121.710522] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b881035e-d31d-466a-be1e-b610e2545985 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.718318] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8907cd-2c07-4861-a8a7-d11ccead62de {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.749296] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d21e1c-afca-4d14-b4da-86e54399d0ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.756443] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-92cd960a-cdbf-4d4e-b9ce-ee3378128dc3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.769249] env[62525]: DEBUG nova.compute.provider_tree [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2122.273112] env[62525]: DEBUG nova.scheduler.client.report [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2123.283674] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.622s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.821706] env[62525]: INFO nova.compute.manager [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Swapping old allocation on dict_keys(['bb89c0ac-8f56-43c6-9f73-fd897be63424']) held by migration c772edb4-dbd1-41f7-8bb9-44833e5986a5 for instance [ 2124.844248] env[62525]: DEBUG nova.scheduler.client.report [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Overwriting current allocation {'allocations': {'bb89c0ac-8f56-43c6-9f73-fd897be63424': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 177}}, 'project_id': '6dbd20742b0f42d5ac04268223bfe911', 'user_id': '5e20c4d99e0b4e08a3b92f274ca94354', 'consumer_generation': 1} on consumer 2fddc809-c89c-47d7-b556-c94dfa46621c {{(pid=62525) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2124.927641] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.927842] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
2124.928031] env[62525]: DEBUG nova.network.neutron [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2125.635713] env[62525]: DEBUG nova.network.neutron [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [{"id": "99600567-0e8c-4662-9833-1b9ae66d1e51", "address": "fa:16:3e:91:0b:07", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99600567-0e", "ovs_interfaceid": "99600567-0e8c-4662-9833-1b9ae66d1e51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.138539] env[62525]: DEBUG oslo_concurrency.lockutils [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-2fddc809-c89c-47d7-b556-c94dfa46621c" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.139578] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80edc7da-d1b9-4bea-a907-5d25b9b1a428 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.147120] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed38cb20-9d65-44d3-ab7a-2062300ae24e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.228601] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2127.229066] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f13689c6-230e-4401-a0b6-d35743a8f801 {{(pid=62525) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.237841] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2127.237841] env[62525]: value = "task-1782415" [ 2127.237841] env[62525]: _type = "Task" [ 2127.237841] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.249275] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.747886] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782415, 'name': PowerOffVM_Task, 'duration_secs': 0.221229} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.748171] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2127.748830] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2127.749062] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2127.749225] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2127.749408] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2127.749556] env[62525]: 
DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2127.749700] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2127.749903] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2127.750071] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2127.750240] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2127.750400] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2127.750570] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2127.755559] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da35143c-5dd4-467f-b822-380e2cda749c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.770959] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2127.770959] env[62525]: value = "task-1782416" [ 2127.770959] env[62525]: _type = "Task" [ 2127.770959] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.778324] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782416, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.281270] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782416, 'name': ReconfigVM_Task, 'duration_secs': 0.143468} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.282088] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be39c0f1-b08b-431f-b15f-124fa63b295d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.302197] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2128.302438] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2128.302600] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2128.302781] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2128.302929] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2128.303088] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2128.303292] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2128.303448] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2128.303610] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2128.303767] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2128.303935] env[62525]: DEBUG nova.virt.hardware [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2128.304701] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f5fed0e-250e-44fe-b3b9-3152e66a7ffc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.309809] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2128.309809] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52b7cfe2-51b6-9379-b823-d173542579ff" [ 2128.309809] env[62525]: _type = "Task" [ 2128.309809] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.317019] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b7cfe2-51b6-9379-b823-d173542579ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.819699] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52b7cfe2-51b6-9379-b823-d173542579ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010603} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.824883] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2128.825160] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32461470-69fe-488b-9eeb-8e19fb4538b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.843112] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2128.843112] env[62525]: value = "task-1782417" [ 2128.843112] env[62525]: _type = "Task" [ 2128.843112] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.851653] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.352884] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782417, 'name': ReconfigVM_Task, 'duration_secs': 0.211267} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.353223] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2129.353874] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65418582-f91f-4c8f-a784-e528849419da {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.377770] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2129.377995] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83d67c29-547e-4341-8eb5-28d61d904ef8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.396093] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2129.396093] env[62525]: value = "task-1782418" [ 2129.396093] env[62525]: _type = "Task" [ 2129.396093] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.403202] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782418, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.905714] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782418, 'name': ReconfigVM_Task, 'duration_secs': 0.292139} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.905961] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c/2fddc809-c89c-47d7-b556-c94dfa46621c.vmdk or device None with type thin {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2129.906800] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4499d6c0-2615-4f0c-9f85-9f8ed941406d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.926977] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f3c953-69af-4538-8281-9a2640b7de83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.946895] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec21ee96-fc7b-41e7-b813-58c7df6842e4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.966229] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92414ac-436a-41c2-a140-76839f24b183 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.972565] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2129.972786] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99bf0a65-5e11-4fde-9365-6e778e989dbb {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.979063] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2129.979063] env[62525]: value = "task-1782419" [ 2129.979063] env[62525]: _type = "Task" [ 2129.979063] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.985873] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782419, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.489513] env[62525]: DEBUG oslo_vmware.api [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782419, 'name': PowerOnVM_Task, 'duration_secs': 0.360596} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.489891] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2131.534844] env[62525]: INFO nova.compute.manager [None req-820b08c7-0cdd-4616-9843-dc4fd4572ee1 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance to original state: 'active' [ 2132.711377] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "2fddc809-c89c-47d7-b556-c94dfa46621c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.711806] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.712315] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.712542] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.712718] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.714951] env[62525]: INFO nova.compute.manager [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Terminating instance [ 2132.717043] env[62525]: DEBUG nova.compute.manager [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2132.717272] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2132.717510] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7e2ef54-1421-423f-a641-ffa9c2019a7e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.727409] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2132.727409] env[62525]: value = "task-1782420" [ 2132.727409] env[62525]: _type = "Task" [ 2132.727409] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.735306] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782420, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.237280] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782420, 'name': PowerOffVM_Task, 'duration_secs': 0.176118} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.237575] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2133.237773] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Volume detach. 
Driver type: vmdk {{(pid=62525) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2133.237962] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369885', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'name': 'volume-2df595b5-c605-4aa5-a54c-51e8906f0390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '2fddc809-c89c-47d7-b556-c94dfa46621c', 'attached_at': '2024-12-12T00:23:30.000000', 'detached_at': '', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'serial': '2df595b5-c605-4aa5-a54c-51e8906f0390'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2133.238718] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b538a28-14af-4a26-8741-d2842e45440a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.259679] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13d6377-897c-47a7-89b9-d398a3f8c4b7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.265896] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7678df8d-4bf0-4df8-aade-4b417c827c5c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.287021] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6ce2f3-d9e2-409d-b5dd-3ec1b38ff93d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.301190] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] The volume has not been displaced from its original location: [datastore1] volume-2df595b5-c605-4aa5-a54c-51e8906f0390/volume-2df595b5-c605-4aa5-a54c-51e8906f0390.vmdk. No consolidation needed. 
{{(pid=62525) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2133.306302] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2133.306543] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bdd92a8-85eb-491d-94af-8b707d73243c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.324381] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2133.324381] env[62525]: value = "task-1782421" [ 2133.324381] env[62525]: _type = "Task" [ 2133.324381] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.331955] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782421, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.834148] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782421, 'name': ReconfigVM_Task, 'duration_secs': 0.389958} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.834534] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=62525) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2133.838968] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48e3eff3-95c4-411a-863a-54f7b10eb814 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.853330] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2133.853330] env[62525]: value = "task-1782422" [ 2133.853330] env[62525]: _type = "Task" [ 2133.853330] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.860505] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782422, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.363601] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782422, 'name': ReconfigVM_Task, 'duration_secs': 0.124397} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.363885] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369885', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'name': 'volume-2df595b5-c605-4aa5-a54c-51e8906f0390', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '2fddc809-c89c-47d7-b556-c94dfa46621c', 'attached_at': '2024-12-12T00:23:30.000000', 'detached_at': '', 'volume_id': '2df595b5-c605-4aa5-a54c-51e8906f0390', 'serial': '2df595b5-c605-4aa5-a54c-51e8906f0390'} {{(pid=62525) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2134.364187] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2134.364982] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00eeb67-6b18-4d4a-90c8-34c66d209cf3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.371245] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2134.371455] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-580f5649-9779-4602-82c5-8b8744b86b17 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.446077] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2134.446326] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2134.446531] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Deleting the datastore file [datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2134.446820] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-703d7f7d-24ef-47a8-b3f1-58074be6473c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.453859] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2134.453859] env[62525]: value = "task-1782424" [ 2134.453859] env[62525]: _type = "Task" [ 2134.453859] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.461689] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782424, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.963469] env[62525]: DEBUG oslo_vmware.api [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782424, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137204} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.963863] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2134.963907] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2134.964080] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2134.964344] env[62525]: INFO nova.compute.manager [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Took 2.25 seconds to destroy the instance on the hypervisor. [ 2134.964595] env[62525]: DEBUG oslo.service.loopingcall [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2134.964788] env[62525]: DEBUG nova.compute.manager [-] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2134.964884] env[62525]: DEBUG nova.network.neutron [-] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2135.412522] env[62525]: DEBUG nova.compute.manager [req-5beb9f4a-eca3-45ce-a9af-cf5568671a66 req-e5280e82-f691-418f-8f0a-0b350d213318 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Received event network-vif-deleted-99600567-0e8c-4662-9833-1b9ae66d1e51 {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2135.412729] env[62525]: INFO nova.compute.manager [req-5beb9f4a-eca3-45ce-a9af-cf5568671a66 req-e5280e82-f691-418f-8f0a-0b350d213318 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Neutron deleted interface 99600567-0e8c-4662-9833-1b9ae66d1e51; detaching it from the instance and deleting it from the info cache [ 2135.412901] env[62525]: DEBUG nova.network.neutron [req-5beb9f4a-eca3-45ce-a9af-cf5568671a66 req-e5280e82-f691-418f-8f0a-0b350d213318 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.885861] env[62525]: DEBUG nova.network.neutron [-] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.917889] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-516f0a6a-c7c0-4f43-8eb8-5bbbb835b3b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.927529] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42aa84a-c86a-48ee-838c-e130357642a7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.950974] env[62525]: DEBUG nova.compute.manager [req-5beb9f4a-eca3-45ce-a9af-cf5568671a66 req-e5280e82-f691-418f-8f0a-0b350d213318 service nova] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Detach interface failed, port_id=99600567-0e8c-4662-9833-1b9ae66d1e51, reason: Instance 2fddc809-c89c-47d7-b556-c94dfa46621c could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2136.388669] env[62525]: INFO nova.compute.manager [-] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Took 1.42 seconds to deallocate network for instance. [ 2136.932131] env[62525]: INFO nova.compute.manager [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: 2fddc809-c89c-47d7-b556-c94dfa46621c] Took 0.54 seconds to detach 1 volumes for instance. 
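
The records above complete the teardown of instance 2fddc809-c89c-47d7-b556-c94dfa46621c: the Cinder volume's disk is detached through ReconfigVM_Task calls, the VM is removed from the vCenter inventory with UnregisterVM, its directory on datastore1 is deleted with FileManager.DeleteDatastoreFile_Task, and the Neutron port is deallocated. Every vCenter call that returns a Task object is polled by oslo.vmware's wait_for_task/_poll_task until it reports completion. Below is a minimal sketch of that same unregister-and-delete sequence written directly against oslo.vmware; the vCenter host, credentials, retry/poll values and the datacenter lookup are illustrative assumptions, not values taken from this run.

    # Hedged sketch: unregister a VM and delete its datastore directory with
    # oslo.vmware, mirroring the UnregisterVM and DeleteDatastoreFile_Task
    # calls (and the task polling) seen in the log above.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org',   # vCenter host (placeholder)
        'administrator',         # username (placeholder)
        'secret',                # password (placeholder)
        3,                       # api_retry_count (assumed value)
        0.5)                     # task_poll_interval in seconds (assumed value)

    content = session.vim.service_content

    # Locate the VM by its Nova instance UUID (SearchIndex.FindAllByUuid);
    # this assumes the VM still exists in the inventory.
    vms = session.invoke_api(session.vim, 'FindAllByUuid', content.searchIndex,
                             uuid='2fddc809-c89c-47d7-b556-c94dfa46621c',
                             vmSearch=True, instanceUuid=True)
    vm_ref = vms[0]

    # Remove the VM from the inventory; UnregisterVM is synchronous and does
    # not return a Task.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory on the datastore. This returns a Task,
    # which wait_for_task() polls much like the _poll_task lines above.
    datacenters = session.invoke_api(vim_util, 'get_objects', session.vim,
                                     'Datacenter', 100)
    dc_ref = datacenters.objects[0].obj
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              content.fileManager,
                              name='[datastore1] 2fddc809-c89c-47d7-b556-c94dfa46621c',
                              datacenter=dc_ref)
    session.wait_for_task(task)
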
[ 2137.439226] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2137.439580] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2137.439711] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.462328] env[62525]: INFO nova.scheduler.client.report [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted allocations for instance 2fddc809-c89c-47d7-b556-c94dfa46621c [ 2137.969419] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3d91f4a2-24fe-4737-afa7-bdef1a853b37 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "2fddc809-c89c-47d7-b556-c94dfa46621c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.258s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.779991] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.780296] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.282591] env[62525]: DEBUG nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Starting instance... 
{{(pid=62525) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2139.805345] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.805669] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.807525] env[62525]: INFO nova.compute.claims [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2140.843526] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d812176d-84ad-4fe7-a073-8816a6b15994 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.851072] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e12da7c-f824-4807-8246-1c79b2a9689f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.880827] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a819f8-17a0-4560-b309-acde97e7b672 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.887937] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bec097-3792-4102-910c-ace36fb1d947 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.900503] env[62525]: DEBUG nova.compute.provider_tree [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2141.404092] env[62525]: DEBUG nova.scheduler.client.report [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2141.909245] env[62525]: DEBUG oslo_concurrency.lockutils 
[None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.909779] env[62525]: DEBUG nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Start building networks asynchronously for instance. {{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2142.414916] env[62525]: DEBUG nova.compute.utils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Using /dev/sd instead of None {{(pid=62525) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2142.416399] env[62525]: DEBUG nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Allocating IP information in the background. {{(pid=62525) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2142.416554] env[62525]: DEBUG nova.network.neutron [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] allocate_for_instance() {{(pid=62525) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2142.467070] env[62525]: DEBUG nova.policy [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e20c4d99e0b4e08a3b92f274ca94354', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6dbd20742b0f42d5ac04268223bfe911', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62525) authorize /opt/stack/nova/nova/policy.py:201}} [ 2142.725719] env[62525]: DEBUG nova.network.neutron [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Successfully created port: f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2142.919927] env[62525]: DEBUG nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Start building block device mappings for instance. 
{{(pid=62525) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2143.930699] env[62525]: DEBUG nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Start spawning the instance on the hypervisor. {{(pid=62525) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2143.956021] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:54:36Z,direct_url=,disk_format='vmdk',id=a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='c6489820a95e4a7db91372ce766ff6d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:54:37Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2143.956318] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2143.956478] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2143.956658] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2143.956806] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2143.956952] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2143.957202] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2143.957386] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2143.957554] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2143.957715] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2143.957886] env[62525]: DEBUG nova.virt.hardware [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2143.958764] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7daf63f2-5cbe-462f-963a-30cee9fd5d13 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.966852] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47748345-9e82-49f9-ae2b-19da5bbc4303 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.105979] env[62525]: DEBUG nova.compute.manager [req-74253537-b807-4c59-aba4-e4531a55a15d req-36b1e7dd-ce9d-4ca5-9c73-073c9537d7b2 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-vif-plugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2144.106231] env[62525]: DEBUG oslo_concurrency.lockutils [req-74253537-b807-4c59-aba4-e4531a55a15d req-36b1e7dd-ce9d-4ca5-9c73-073c9537d7b2 service nova] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.106435] env[62525]: DEBUG oslo_concurrency.lockutils [req-74253537-b807-4c59-aba4-e4531a55a15d req-36b1e7dd-ce9d-4ca5-9c73-073c9537d7b2 service nova] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.106606] env[62525]: DEBUG oslo_concurrency.lockutils [req-74253537-b807-4c59-aba4-e4531a55a15d req-36b1e7dd-ce9d-4ca5-9c73-073c9537d7b2 service nova] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.106773] env[62525]: 
DEBUG nova.compute.manager [req-74253537-b807-4c59-aba4-e4531a55a15d req-36b1e7dd-ce9d-4ca5-9c73-073c9537d7b2 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] No waiting events found dispatching network-vif-plugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2144.106936] env[62525]: WARNING nova.compute.manager [req-74253537-b807-4c59-aba4-e4531a55a15d req-36b1e7dd-ce9d-4ca5-9c73-073c9537d7b2 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received unexpected event network-vif-plugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a for instance with vm_state building and task_state spawning. [ 2144.203947] env[62525]: DEBUG nova.network.neutron [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Successfully updated port: f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2144.706925] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.707291] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.707430] env[62525]: DEBUG nova.network.neutron [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2145.248513] env[62525]: DEBUG nova.network.neutron [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Instance cache missing network info. 
{{(pid=62525) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2145.377710] env[62525]: DEBUG nova.network.neutron [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.880214] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.880542] env[62525]: DEBUG nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Instance network_info: |[{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62525) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2145.880987] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:94:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e83154-c0d2-4d3d-b95e-3cd5ba336257', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1baff8b-9244-418b-bb8a-12ffcfcb7f3a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2145.888476] env[62525]: DEBUG oslo.service.loopingcall [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2145.888685] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2145.889303] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38e0e83a-da7b-4358-a40a-4f2fb0cf7b8b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.909012] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2145.909012] env[62525]: value = "task-1782425" [ 2145.909012] env[62525]: _type = "Task" [ 2145.909012] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.916369] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782425, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.136913] env[62525]: DEBUG nova.compute.manager [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2146.137197] env[62525]: DEBUG nova.compute.manager [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing instance network info cache due to event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2146.137389] env[62525]: DEBUG oslo_concurrency.lockutils [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.137539] env[62525]: DEBUG oslo_concurrency.lockutils [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.137702] env[62525]: DEBUG nova.network.neutron [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2146.418021] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782425, 'name': CreateVM_Task, 'duration_secs': 0.300978} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.418297] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2146.418873] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.419056] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.419409] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2146.419659] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b4c270d-c92f-45d6-b1c2-aaa48d5a1d85 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.423854] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2146.423854] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52282289-4237-231d-d8ce-8c31f78476fe" [ 2146.423854] env[62525]: _type = "Task" [ 2146.423854] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.431254] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52282289-4237-231d-d8ce-8c31f78476fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.837826] env[62525]: DEBUG nova.network.neutron [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updated VIF entry in instance network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2146.838224] env[62525]: DEBUG nova.network.neutron [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.934382] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52282289-4237-231d-d8ce-8c31f78476fe, 'name': SearchDatastore_Task, 'duration_secs': 0.010053} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.934636] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.934865] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Processing image a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2146.935107] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.935261] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.935436] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2146.935679] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41efcdda-fc98-4c2c-b58b-de99c5fd7fd0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.944007] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2146.944199] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2146.944871] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a12b0223-cd42-4247-8d9d-1b085dd4c6c8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.949605] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2146.949605] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5205ee89-b980-a0a0-b6fc-c8a1f4d0aacc" [ 2146.949605] env[62525]: _type = "Task" [ 2146.949605] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.956371] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5205ee89-b980-a0a0-b6fc-c8a1f4d0aacc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.340957] env[62525]: DEBUG oslo_concurrency.lockutils [req-cc72a066-1076-46a8-aa0c-e42daae90b23 req-9812a111-859f-4676-b5f3-340fc7e0801e service nova] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2147.462183] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5205ee89-b980-a0a0-b6fc-c8a1f4d0aacc, 'name': SearchDatastore_Task, 'duration_secs': 0.007862} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.462949] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3770ec79-3f1f-4f94-a087-690d89432314 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.467817] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2147.467817] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d713d2-6766-efa2-fa8d-92dd7a68ea6c" [ 2147.467817] env[62525]: _type = "Task" [ 2147.467817] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.475143] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d713d2-6766-efa2-fa8d-92dd7a68ea6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.978223] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52d713d2-6766-efa2-fa8d-92dd7a68ea6c, 'name': SearchDatastore_Task, 'duration_secs': 0.00896} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.978481] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2147.978742] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2147.979013] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9ed2617-50c9-4fc3-a576-b8d0a32e2b33 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.985808] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2147.985808] env[62525]: value = "task-1782426" [ 2147.985808] env[62525]: _type = "Task" [ 2147.985808] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.993016] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782426, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.495336] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782426, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437919} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.495702] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36/a5a2dbb7-f75e-42d0-a67b-cb3f946c1f36.vmdk to [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2148.495785] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Extending root virtual disk to 1048576 {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2148.496067] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d8291cf-1635-4571-9bd4-43fcc544c5fe {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.502192] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2148.502192] env[62525]: value = "task-1782427" [ 2148.502192] env[62525]: _type = "Task" [ 2148.502192] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.509346] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782427, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.011969] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782427, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099781} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.012361] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Extended root virtual disk {{(pid=62525) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2149.013070] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69cd236-708f-41e6-bc7e-af03ca2ecdfc {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.034485] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2149.034794] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af599c3a-6458-4fd6-b61a-1d4b1e131a0a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.053905] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2149.053905] env[62525]: value = "task-1782428" [ 2149.053905] env[62525]: _type = "Task" [ 2149.053905] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.061353] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782428, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.564432] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782428, 'name': ReconfigVM_Task, 'duration_secs': 0.292161} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.564799] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Reconfigured VM instance instance-0000007c to attach disk [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk or device None with type sparse {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2149.565417] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f50da7b-2d1d-4750-ad0b-9e2db1ce368d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.571497] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2149.571497] env[62525]: value = "task-1782429" [ 2149.571497] env[62525]: _type = "Task" [ 2149.571497] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.579126] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782429, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.081755] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782429, 'name': Rename_Task, 'duration_secs': 0.138181} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.082049] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2150.082280] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d577a161-032f-41d6-8fb6-32dd870453a3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.088396] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2150.088396] env[62525]: value = "task-1782430" [ 2150.088396] env[62525]: _type = "Task" [ 2150.088396] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.095664] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782430, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.598433] env[62525]: DEBUG oslo_vmware.api [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782430, 'name': PowerOnVM_Task, 'duration_secs': 0.448279} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.598829] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2150.598829] env[62525]: INFO nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Took 6.67 seconds to spawn the instance on the hypervisor. [ 2150.598976] env[62525]: DEBUG nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2150.599757] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98631f67-b56c-4717-b3ee-1770546e0a5d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.116882] env[62525]: INFO nova.compute.manager [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Took 11.33 seconds to build instance. [ 2151.619398] env[62525]: DEBUG oslo_concurrency.lockutils [None req-8bcfd666-5ce3-4d60-850d-6fa3541d8ce6 tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.839s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2151.892206] env[62525]: DEBUG nova.compute.manager [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2151.892407] env[62525]: DEBUG nova.compute.manager [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing instance network info cache due to event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2151.892635] env[62525]: DEBUG oslo_concurrency.lockutils [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2151.892780] env[62525]: DEBUG oslo_concurrency.lockutils [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.892941] env[62525]: DEBUG nova.network.neutron [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2152.607380] env[62525]: DEBUG nova.network.neutron [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updated VIF entry in instance network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. {{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2152.607755] env[62525]: DEBUG nova.network.neutron [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.711828] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.712254] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.712254] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2152.712350] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2153.110764] env[62525]: DEBUG oslo_concurrency.lockutils [req-d5b2f0f3-3b82-47d0-bb3a-809818898c80 req-841fa7f8-7c50-4555-9bdb-0d321ec24872 service nova] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2153.253286] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2153.253481] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2153.253666] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2153.253863] env[62525]: DEBUG nova.objects.instance [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lazy-loading 'info_cache' on Instance uuid f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2154.977077] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.480021] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2155.480291] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2155.480482] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.480639] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.480783] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.480931] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.481110] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.481268] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.481396] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2155.481538] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.984330] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.984703] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.984703] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2155.984920] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2155.985746] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c58210c-b727-49ac-87e0-e5c4413985ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.994341] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55393d8-a363-4659-ab9f-8077b6c3bce8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.009311] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed415772-310f-41d4-a018-515e497eda0c {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.015609] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e1db7c-9ed5-4acb-9b7c-efd6b6948486 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.043973] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181374MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2156.044138] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2156.044335] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.068140] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f40041a2-2cce-41c7-a9b3-44faaac49a0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2157.068415] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2157.068506] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2157.092719] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef8a73b-3dd5-46d0-bae1-b70eaf7fe27b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.100171] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25a951a-d516-4063-884b-fda998b2bc83 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.130202] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5298418e-4c9f-4a75-9487-4cfcfaf0c645 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.137163] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbba1460-7485-44c9-abac-29bea966489e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.150011] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2157.652726] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2158.158419] env[62525]: DEBUG nova.compute.resource_tracker [None 
req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2158.158849] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.114s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.708338] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.708730] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.214146] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.214359] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2186.214444] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2186.835781] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2186.836050] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2186.836202] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Forcefully refreshing network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2186.836369] env[62525]: DEBUG nova.objects.instance [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lazy-loading 'info_cache' on Instance uuid f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2188.567045] env[62525]: DEBUG nova.network.neutron [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.070125] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2189.070364] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updated the network info_cache for instance {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2189.070684] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.070796] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.070948] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.071057] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.071174] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.071316] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.071440] env[62525]: DEBUG 
nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2189.071589] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.574959] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.575332] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.575381] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.575553] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2189.576460] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330d4c35-1722-4df3-a13e-e203b1aaf445 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.584813] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c52669-96f9-4018-9057-51cf7e7b2c88 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.598353] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98079e8d-0596-45d0-a580-84cdd263ee72 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.604569] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c2205b-de46-4e36-b22f-016380a347ed {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.632525] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181374MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2189.632665] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.632846] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.689687] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.689928] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.690116] env[62525]: INFO nova.compute.manager [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Shelving [ 2190.196340] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2190.196610] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07c4df73-0abf-4121-91af-b9b2bb25f88d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.203846] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2190.203846] env[62525]: value = "task-1782431" [ 2190.203846] env[62525]: _type = "Task" [ 2190.203846] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.212065] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782431, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.656571] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Instance f40041a2-2cce-41c7-a9b3-44faaac49a0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62525) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2190.656851] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2190.656920] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2190.680595] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b46918e-7862-46a8-be9c-789022419cc3 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.688218] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d62900-a059-4ecd-bfbb-a51f9519da9f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.721160] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e1e999-46b6-48f1-aa21-b5eb626a2caf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.728066] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782431, 'name': PowerOffVM_Task, 'duration_secs': 0.174955} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.729952] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2190.730689] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567ef91a-5c2e-4c1e-b77b-8cecaffa7c29 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.733706] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f372be45-e322-41a8-9291-e3272d914ae5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.746277] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2190.761782] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2190.764851] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd68a22-9c65-4a80-8ead-1d7e5c88f2f1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.268448] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2191.268764] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.636s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.274947] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Creating Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2191.275291] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3911b99e-b505-4a49-99ce-45c0e23c7489 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2191.282426] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2191.282426] env[62525]: value = "task-1782432" [ 2191.282426] env[62525]: _type = "Task" [ 2191.282426] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.291125] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782432, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.792550] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782432, 'name': CreateSnapshot_Task, 'duration_secs': 0.397024} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.792930] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Created Snapshot of the VM instance {{(pid=62525) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2191.793541] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e651463e-9e50-4eda-b6fe-40ba36ad9646 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.311179] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Creating linked-clone VM from snapshot {{(pid=62525) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2192.311498] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1bbd1959-2a5e-47ac-aa58-0a3ac1719cf4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.320472] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2192.320472] env[62525]: value = "task-1782433" [ 2192.320472] env[62525]: _type = "Task" [ 2192.320472] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.328024] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782433, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.830071] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782433, 'name': CloneVM_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.330776] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782433, 'name': CloneVM_Task, 'duration_secs': 0.947225} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.331052] env[62525]: INFO nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Created linked-clone VM from snapshot [ 2193.331769] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e443c3-8fe2-4f3b-8e00-175d4bc85443 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.338800] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Uploading image 959dbdf5-d5a3-4752-b03a-24c832daa620 {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2193.363033] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2193.363033] env[62525]: value = "vm-369888" [ 2193.363033] env[62525]: _type = "VirtualMachine" [ 2193.363033] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2193.363289] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-14e8103a-691e-4ee1-845e-680c549b5bae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.369951] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease: (returnval){ [ 2193.369951] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524d426e-d70e-8c8f-31a6-ae31db12b2c1" [ 2193.369951] env[62525]: _type = "HttpNfcLease" [ 2193.369951] env[62525]: } obtained for exporting VM: (result){ [ 2193.369951] env[62525]: value = "vm-369888" [ 2193.369951] env[62525]: _type = "VirtualMachine" [ 2193.369951] env[62525]: }. 
{{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2193.370381] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the lease: (returnval){ [ 2193.370381] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524d426e-d70e-8c8f-31a6-ae31db12b2c1" [ 2193.370381] env[62525]: _type = "HttpNfcLease" [ 2193.370381] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2193.376329] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2193.376329] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524d426e-d70e-8c8f-31a6-ae31db12b2c1" [ 2193.376329] env[62525]: _type = "HttpNfcLease" [ 2193.376329] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2193.878611] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2193.878611] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524d426e-d70e-8c8f-31a6-ae31db12b2c1" [ 2193.878611] env[62525]: _type = "HttpNfcLease" [ 2193.878611] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2193.879141] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2193.879141] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]524d426e-d70e-8c8f-31a6-ae31db12b2c1" [ 2193.879141] env[62525]: _type = "HttpNfcLease" [ 2193.879141] env[62525]: }. {{(pid=62525) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2193.879642] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5ad12c-29d6-477b-bfca-49b42de8b4ae {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.887121] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bffd05-b4bc-ef62-abc8-c89052468e11/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2193.887289] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bffd05-b4bc-ef62-abc8-c89052468e11/disk-0.vmdk for reading. 
{{(pid=62525) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2193.973958] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d741f01a-c4b1-4aed-a4d4-689d920c2efd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.379995] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bffd05-b4bc-ef62-abc8-c89052468e11/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2201.380905] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df503835-6da5-426e-9ff3-d2411872f555 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.386906] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bffd05-b4bc-ef62-abc8-c89052468e11/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2201.387083] env[62525]: ERROR oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bffd05-b4bc-ef62-abc8-c89052468e11/disk-0.vmdk due to incomplete transfer. [ 2201.387293] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e5d1515a-b07b-4d48-bb85-7e0d7b29dac7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.394897] env[62525]: DEBUG oslo_vmware.rw_handles [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bffd05-b4bc-ef62-abc8-c89052468e11/disk-0.vmdk. 
{{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2201.395099] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Uploaded image 959dbdf5-d5a3-4752-b03a-24c832daa620 to the Glance image server {{(pid=62525) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2201.397249] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Destroying the VM {{(pid=62525) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2201.397485] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-81a80260-9c39-4290-b2d4-bc2e9ac7ee63 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.404654] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2201.404654] env[62525]: value = "task-1782435" [ 2201.404654] env[62525]: _type = "Task" [ 2201.404654] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.411821] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782435, 'name': Destroy_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.914461] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782435, 'name': Destroy_Task, 'duration_secs': 0.352416} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.914760] env[62525]: INFO nova.virt.vmwareapi.vm_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Destroyed the VM [ 2201.914955] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Deleting Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2201.915212] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d9cf8211-ebb4-4483-9013-06788078248e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.921435] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2201.921435] env[62525]: value = "task-1782436" [ 2201.921435] env[62525]: _type = "Task" [ 2201.921435] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.928687] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782436, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.430434] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782436, 'name': RemoveSnapshot_Task, 'duration_secs': 0.368618} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.430887] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Deleted Snapshot of the VM instance {{(pid=62525) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2202.430940] env[62525]: DEBUG nova.compute.manager [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2202.431696] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c67d82-725e-473a-9cbc-15fd431e7095 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.944351] env[62525]: INFO nova.compute.manager [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Shelve offloading [ 2202.946030] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2202.946281] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70ee456b-f571-4ec6-a1c2-ab8096c95206 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.953367] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2202.953367] env[62525]: value = "task-1782437" [ 2202.953367] env[62525]: _type = "Task" [ 2202.953367] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.961137] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782437, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.464026] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] VM already powered off {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2203.464397] env[62525]: DEBUG nova.compute.manager [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2203.464962] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1907fd-2208-4990-bc3c-008cc96144c9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.470243] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.470406] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.470574] env[62525]: DEBUG nova.network.neutron [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2204.171352] env[62525]: DEBUG nova.network.neutron [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2204.673745] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.933951] env[62525]: DEBUG nova.compute.manager [req-37ab5c52-a5b9-4b42-aa83-1e32c85819ef req-c30f9c7a-eb5f-4e3c-ae19-3f7d1631bd71 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-vif-unplugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2204.934169] env[62525]: DEBUG oslo_concurrency.lockutils [req-37ab5c52-a5b9-4b42-aa83-1e32c85819ef req-c30f9c7a-eb5f-4e3c-ae19-3f7d1631bd71 service nova] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.934363] env[62525]: DEBUG oslo_concurrency.lockutils [req-37ab5c52-a5b9-4b42-aa83-1e32c85819ef req-c30f9c7a-eb5f-4e3c-ae19-3f7d1631bd71 service nova] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.934536] env[62525]: DEBUG oslo_concurrency.lockutils [req-37ab5c52-a5b9-4b42-aa83-1e32c85819ef req-c30f9c7a-eb5f-4e3c-ae19-3f7d1631bd71 service nova] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.934700] env[62525]: DEBUG nova.compute.manager [req-37ab5c52-a5b9-4b42-aa83-1e32c85819ef req-c30f9c7a-eb5f-4e3c-ae19-3f7d1631bd71 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] No waiting events found dispatching network-vif-unplugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2204.934863] env[62525]: WARNING nova.compute.manager [req-37ab5c52-a5b9-4b42-aa83-1e32c85819ef req-c30f9c7a-eb5f-4e3c-ae19-3f7d1631bd71 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received unexpected event network-vif-unplugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a for instance with vm_state shelved and task_state shelving_offloading. 
[ 2205.009524] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2205.010431] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10d8e40-43ea-434f-a89a-e4c8e04590a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.017843] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2205.018085] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78097f39-c154-49c3-8cd1-60ddced46ca2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.959275] env[62525]: DEBUG nova.compute.manager [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2206.959513] env[62525]: DEBUG nova.compute.manager [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing instance network info cache due to event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2206.959672] env[62525]: DEBUG oslo_concurrency.lockutils [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.959817] env[62525]: DEBUG oslo_concurrency.lockutils [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.959977] env[62525]: DEBUG nova.network.neutron [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2207.654921] env[62525]: DEBUG nova.network.neutron [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updated VIF entry in instance network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2207.655288] env[62525]: DEBUG nova.network.neutron [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf1baff8b-92", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2208.157741] env[62525]: DEBUG oslo_concurrency.lockutils [req-4e10e822-96ed-44e0-8877-d36e40958381 req-bc800bcb-3a23-43a1-b6ae-666755c5fb6d service nova] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2209.125734] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2209.125964] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2209.126164] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleting the datastore file [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2209.126444] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcc9eb64-8552-46fc-b62b-63ef1b4169e0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.132529] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2209.132529] env[62525]: value = "task-1782439" [ 2209.132529] env[62525]: _type = "Task" [ 2209.132529] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.140295] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782439, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.643299] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782439, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.143482] env[62525]: DEBUG oslo_vmware.api [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.926734} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.143761] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2210.143964] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2210.144157] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2210.164963] env[62525]: INFO nova.scheduler.client.report [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted allocations for instance f40041a2-2cce-41c7-a9b3-44faaac49a0f [ 2210.669383] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.669701] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.669887] env[62525]: DEBUG nova.objects.instance [None req-106da370-6d23-46f4-af14-6d119ff50a5c 
tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'resources' on Instance uuid f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2211.172161] env[62525]: DEBUG nova.objects.instance [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'numa_topology' on Instance uuid f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2211.651742] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.674977] env[62525]: DEBUG nova.objects.base [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62525) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2211.700859] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b092c63-5503-40b8-bec0-0ae8bc97e9c1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.709046] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9188bf-9f56-4af3-b369-54dab31c788e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.739752] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bbd419-65ad-4244-8930-c301ebc3d770 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.746913] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0429cacb-8ae3-413a-801a-1c23c0616658 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.759793] env[62525]: DEBUG nova.compute.provider_tree [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2212.262818] env[62525]: DEBUG nova.scheduler.client.report [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2212.769149] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.277259] env[62525]: DEBUG oslo_concurrency.lockutils [None req-106da370-6d23-46f4-af14-6d119ff50a5c tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.587s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.278138] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.626s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2213.278328] env[62525]: INFO nova.compute.manager [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Unshelving [ 2214.300446] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.300759] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.300906] env[62525]: DEBUG nova.objects.instance [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'pci_requests' on Instance uuid f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2214.805068] env[62525]: DEBUG nova.objects.instance [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'numa_topology' on Instance uuid f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2215.309054] env[62525]: INFO nova.compute.claims [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2216.341907] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f09fee-5c45-4818-8d6e-c9968841ff2b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.350574] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c45684-2544-4bab-812a-a45582122da0 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.379743] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1031aa-d74c-4c85-be92-db0d9ac11d2a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.386761] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acca55f-6302-431f-9b1a-4668ee457657 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.400195] env[62525]: DEBUG nova.compute.provider_tree [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2216.903666] env[62525]: DEBUG nova.scheduler.client.report [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2217.408982] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.108s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.438267] env[62525]: INFO nova.network.neutron [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2218.809729] env[62525]: DEBUG nova.compute.manager [req-b2914a55-0dcf-450a-8c2d-bf0e5808aa53 req-fe567cfd-5706-412d-8be5-c708b83b7719 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-vif-plugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2218.810135] env[62525]: DEBUG oslo_concurrency.lockutils [req-b2914a55-0dcf-450a-8c2d-bf0e5808aa53 
req-fe567cfd-5706-412d-8be5-c708b83b7719 service nova] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.810599] env[62525]: DEBUG oslo_concurrency.lockutils [req-b2914a55-0dcf-450a-8c2d-bf0e5808aa53 req-fe567cfd-5706-412d-8be5-c708b83b7719 service nova] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.810599] env[62525]: DEBUG oslo_concurrency.lockutils [req-b2914a55-0dcf-450a-8c2d-bf0e5808aa53 req-fe567cfd-5706-412d-8be5-c708b83b7719 service nova] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.810736] env[62525]: DEBUG nova.compute.manager [req-b2914a55-0dcf-450a-8c2d-bf0e5808aa53 req-fe567cfd-5706-412d-8be5-c708b83b7719 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] No waiting events found dispatching network-vif-plugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2218.810924] env[62525]: WARNING nova.compute.manager [req-b2914a55-0dcf-450a-8c2d-bf0e5808aa53 req-fe567cfd-5706-412d-8be5-c708b83b7719 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received unexpected event network-vif-plugged-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a for instance with vm_state shelved_offloaded and task_state spawning. 
[ 2218.893754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.893754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.893754] env[62525]: DEBUG nova.network.neutron [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Building network info cache for instance {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2219.595799] env[62525]: DEBUG nova.network.neutron [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2220.098754] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.125125] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T23:54:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9fe7c302ed3d11a533c0ee82c2e06b3e',container_format='bare',created_at=2024-12-12T00:24:33Z,direct_url=,disk_format='vmdk',id=959dbdf5-d5a3-4752-b03a-24c832daa620,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-2101087949-shelved',owner='6dbd20742b0f42d5ac04268223bfe911',properties=ImageMetaProps,protected=,size=31665152,status='active',tags=,updated_at=2024-12-12T00:24:45Z,virtual_size=,visibility=), allow threads: False {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2220.125401] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2220.125561] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image limits 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2220.125737] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Flavor pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2220.125880] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Image pref 0:0:0 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2220.126036] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62525) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2220.126246] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2220.126404] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2220.126564] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Got 1 possible topologies {{(pid=62525) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2220.126721] env[62525]: 
DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2220.126887] env[62525]: DEBUG nova.virt.hardware [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62525) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2220.128102] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6ad2df-80df-4696-92d7-4dcef08e033a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.136407] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39745518-58cd-4bbb-8aad-09fb19d365b2 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.149378] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:94:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e83154-c0d2-4d3d-b95e-3cd5ba336257', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1baff8b-9244-418b-bb8a-12ffcfcb7f3a', 'vif_model': 'vmxnet3'}] {{(pid=62525) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2220.156587] env[62525]: DEBUG oslo.service.loopingcall [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2220.156797] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Creating VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2220.156987] env[62525]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ce8d45c-a247-43c8-9517-b0ad8056f30b {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.174935] env[62525]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2220.174935] env[62525]: value = "task-1782440" [ 2220.174935] env[62525]: _type = "Task" [ 2220.174935] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.181965] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782440, 'name': CreateVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.685166] env[62525]: DEBUG oslo_vmware.api [-] Task: {'id': task-1782440, 'name': CreateVM_Task, 'duration_secs': 0.356135} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.685326] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Created VM on the ESX host {{(pid=62525) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2220.685964] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.686157] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.686585] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2220.686823] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b643eed-7180-45b6-81ff-75e6e0f77501 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.691077] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2220.691077] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52db62dd-2be8-0755-de57-4db6c2594763" [ 2220.691077] env[62525]: _type = "Task" [ 2220.691077] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.698214] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52db62dd-2be8-0755-de57-4db6c2594763, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.836010] env[62525]: DEBUG nova.compute.manager [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2220.836247] env[62525]: DEBUG nova.compute.manager [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing instance network info cache due to event network-changed-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. 
{{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2220.836484] env[62525]: DEBUG oslo_concurrency.lockutils [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] Acquiring lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.836629] env[62525]: DEBUG oslo_concurrency.lockutils [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] Acquired lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.836788] env[62525]: DEBUG nova.network.neutron [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Refreshing network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2221.200727] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]52db62dd-2be8-0755-de57-4db6c2594763, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.546854] env[62525]: DEBUG nova.network.neutron [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updated VIF entry in instance network info cache for port f1baff8b-9244-418b-bb8a-12ffcfcb7f3a. 
{{(pid=62525) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2221.547248] env[62525]: DEBUG nova.network.neutron [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [{"id": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "address": "fa:16:3e:f4:94:30", "network": {"id": "ab805007-f547-4f1c-8d01-40ef7beedaa7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1875379386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6dbd20742b0f42d5ac04268223bfe911", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1baff8b-92", "ovs_interfaceid": "f1baff8b-9244-418b-bb8a-12ffcfcb7f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2221.702095] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2221.702356] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Processing image 959dbdf5-d5a3-4752-b03a-24c832daa620 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2221.702590] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620/959dbdf5-d5a3-4752-b03a-24c832daa620.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2221.702739] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquired lock "[datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620/959dbdf5-d5a3-4752-b03a-24c832daa620.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2221.702914] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2221.703178] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c3e5e64-a7ff-4e3a-9a24-abcb8260f86a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.917061] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2221.917241] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62525) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2221.918048] env[62525]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfd761d9-a066-4174-892f-dcff8a659334 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.923694] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2221.923694] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]5292332f-978b-57b6-6849-f1fe08f96002" [ 2221.923694] env[62525]: _type = "Task" [ 2221.923694] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.930900] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': session[52912505-83d3-c6c8-239b-e663f6298abd]5292332f-978b-57b6-6849-f1fe08f96002, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.049785] env[62525]: DEBUG oslo_concurrency.lockutils [req-719b7054-3d81-453f-bf40-18c79c4d140d req-08cc8329-ce4a-4318-864f-530bc637c227 service nova] Releasing lock "refresh_cache-f40041a2-2cce-41c7-a9b3-44faaac49a0f" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2222.435431] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Preparing fetch location {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2222.435811] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Fetch image to [datastore1] OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5/OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5.vmdk {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2222.435898] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Downloading stream optimized image 959dbdf5-d5a3-4752-b03a-24c832daa620 to [datastore1] OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5/OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5.vmdk on the data store datastore1 as vApp {{(pid=62525) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2222.436021] env[62525]: DEBUG nova.virt.vmwareapi.images [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Downloading image file data 959dbdf5-d5a3-4752-b03a-24c832daa620 to the ESX as VM named 'OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5' {{(pid=62525) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2222.500580] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2222.500580] env[62525]: value = "resgroup-9" [ 2222.500580] env[62525]: _type = "ResourcePool" [ 2222.500580] env[62525]: }. 
{{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2222.500858] env[62525]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-6a078581-71aa-42d5-858e-226493023493 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.520706] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease: (returnval){ [ 2222.520706] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d44167-0c5e-83c3-247c-33892e60ce9f" [ 2222.520706] env[62525]: _type = "HttpNfcLease" [ 2222.520706] env[62525]: } obtained for vApp import into resource pool (val){ [ 2222.520706] env[62525]: value = "resgroup-9" [ 2222.520706] env[62525]: _type = "ResourcePool" [ 2222.520706] env[62525]: }. {{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2222.521029] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the lease: (returnval){ [ 2222.521029] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d44167-0c5e-83c3-247c-33892e60ce9f" [ 2222.521029] env[62525]: _type = "HttpNfcLease" [ 2222.521029] env[62525]: } to be ready. {{(pid=62525) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2222.527397] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2222.527397] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d44167-0c5e-83c3-247c-33892e60ce9f" [ 2222.527397] env[62525]: _type = "HttpNfcLease" [ 2222.527397] env[62525]: } is initializing. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2223.029185] env[62525]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2223.029185] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d44167-0c5e-83c3-247c-33892e60ce9f" [ 2223.029185] env[62525]: _type = "HttpNfcLease" [ 2223.029185] env[62525]: } is ready. {{(pid=62525) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2223.029667] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2223.029667] env[62525]: value = "session[52912505-83d3-c6c8-239b-e663f6298abd]52d44167-0c5e-83c3-247c-33892e60ce9f" [ 2223.029667] env[62525]: _type = "HttpNfcLease" [ 2223.029667] env[62525]: }. 
{{(pid=62525) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2223.030155] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d62740-e3b4-495c-8caf-2b68825af8a5 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.036726] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523aceec-33b2-a45f-c163-c88592cb7a5d/disk-0.vmdk from lease info. {{(pid=62525) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2223.036893] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating HTTP connection to write to file with size = 31665152 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523aceec-33b2-a45f-c163-c88592cb7a5d/disk-0.vmdk. {{(pid=62525) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2223.099336] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ce1dd3b0-5ec0-49e5-bbed-f6c37627477e {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.199597] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Completed reading data from the image iterator. {{(pid=62525) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2224.200072] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523aceec-33b2-a45f-c163-c88592cb7a5d/disk-0.vmdk. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2224.200733] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada98767-9826-4883-83b3-0f60fbf3d07a {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.207820] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523aceec-33b2-a45f-c163-c88592cb7a5d/disk-0.vmdk is in state: ready. {{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2224.207975] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523aceec-33b2-a45f-c163-c88592cb7a5d/disk-0.vmdk. 
{{(pid=62525) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2224.208204] env[62525]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-309cfd7b-08b9-48a8-b782-398b1413a168 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.414498] env[62525]: DEBUG oslo_vmware.rw_handles [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523aceec-33b2-a45f-c163-c88592cb7a5d/disk-0.vmdk. {{(pid=62525) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2224.414700] env[62525]: INFO nova.virt.vmwareapi.images [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Downloaded image file data 959dbdf5-d5a3-4752-b03a-24c832daa620 [ 2224.415608] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df065695-f1a2-45fb-8532-b7b03b8f1cde {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.431374] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55718235-4c64-4935-ad2a-cf4c6e3a37e7 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.453435] env[62525]: INFO nova.virt.vmwareapi.images [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] The imported VM was unregistered [ 2224.455702] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Caching image {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2224.455928] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Creating directory with path [datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2224.456198] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d52a6410-c5d2-489f-9046-a5a211dbdff9 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.474948] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Created directory with path [datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620 {{(pid=62525) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2224.475201] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 
tempest-ServerActionsTestOtherB-1532649929-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5/OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5.vmdk to [datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620/959dbdf5-d5a3-4752-b03a-24c832daa620.vmdk. {{(pid=62525) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2224.475504] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-996d66e3-c651-4fc6-a66d-ff8203dfaba4 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.481415] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2224.481415] env[62525]: value = "task-1782443" [ 2224.481415] env[62525]: _type = "Task" [ 2224.481415] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.488724] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782443, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.993626] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782443, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.498362] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782443, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.995689] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782443, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.494437] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782443, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.994600] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782443, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.331238} completed successfully. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2226.995022] env[62525]: INFO nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5/OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5.vmdk to [datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620/959dbdf5-d5a3-4752-b03a-24c832daa620.vmdk. [ 2226.995120] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Cleaning up location [datastore1] OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5 {{(pid=62525) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2226.995249] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_eb1ce376-4391-47c3-bddc-cc3d3a6718f5 {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2226.995514] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abfc96d8-9e34-496d-9483-12768d2cf8a1 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.001919] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2227.001919] env[62525]: value = "task-1782444" [ 2227.001919] env[62525]: _type = "Task" [ 2227.001919] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.009565] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782444, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.512263] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782444, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036236} completed successfully. 
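The repeated "progress is N%" records that follow each Invoking ..._Task call come from oslo.vmware's task polling: the caller starts a vCenter task (MoveVirtualDisk_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task) and a loop re-reads the task state roughly every half second until it reports success or error. A minimal sketch of that pattern, assuming a hypothetical read_task_info() callable rather than the real oslo_vmware.api.VMwareAPISession:

    import time

    class TaskFailedError(Exception):
        pass

    def wait_for_task(read_task_info, poll_interval=0.5):
        # read_task_info() is assumed to return an object with .state
        # ('queued' | 'running' | 'success' | 'error'), .progress and .error.
        while True:
            info = read_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailedError(info.error)
            # Still queued or running: surface progress and poll again, which
            # is what produces the "progress is N%" records seen above.
            print(f"progress is {info.progress or 0}%")
            time.sleep(poll_interval)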
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.512514] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2227.512684] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Releasing lock "[datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620/959dbdf5-d5a3-4752-b03a-24c832daa620.vmdk" {{(pid=62525) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2227.512927] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620/959dbdf5-d5a3-4752-b03a-24c832daa620.vmdk to [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2227.513200] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-212f637a-4dc4-4eec-89d1-d769da2759e8 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.520016] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2227.520016] env[62525]: value = "task-1782445" [ 2227.520016] env[62525]: _type = "Task" [ 2227.520016] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.527077] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782445, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.032348] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782445, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.532232] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782445, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.033947] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782445, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.534806] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782445, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.035311] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782445, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.112411} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.035698] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/959dbdf5-d5a3-4752-b03a-24c832daa620/959dbdf5-d5a3-4752-b03a-24c832daa620.vmdk to [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk {{(pid=62525) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2230.036183] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0e7fd5-7d6c-4893-a11a-0d78be2f0d96 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.057830] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2230.058116] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e4ab938-c95e-40df-80ed-9f9463af845d {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.076835] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2230.076835] env[62525]: value = "task-1782446" [ 2230.076835] env[62525]: _type = "Task" [ 2230.076835] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.084554] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782446, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.587078] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782446, 'name': ReconfigVM_Task, 'duration_secs': 0.324536} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.587383] env[62525]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Reconfigured VM instance instance-0000007c to attach disk [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f/f40041a2-2cce-41c7-a9b3-44faaac49a0f.vmdk or device None with type streamOptimized {{(pid=62525) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2230.588014] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10caf0f2-ddd9-481f-bfc2-918303495134 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.594158] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2230.594158] env[62525]: value = "task-1782447" [ 2230.594158] env[62525]: _type = "Task" [ 2230.594158] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.601367] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782447, 'name': Rename_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.104548] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782447, 'name': Rename_Task, 'duration_secs': 0.135466} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.104899] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powering on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2231.105070] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f7bdfec-9e30-4f94-b2bf-d45a357a1395 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.111211] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2231.111211] env[62525]: value = "task-1782448" [ 2231.111211] env[62525]: _type = "Task" [ 2231.111211] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.118425] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.621209] env[62525]: DEBUG oslo_vmware.api [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782448, 'name': PowerOnVM_Task, 'duration_secs': 0.418723} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.621469] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powered on the VM {{(pid=62525) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2231.711742] env[62525]: DEBUG nova.compute.manager [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Checking state {{(pid=62525) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2231.712677] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b176a6ab-55d9-4fd7-8b44-26076427e4be {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.229150] env[62525]: DEBUG oslo_concurrency.lockutils [None req-3c6652b3-d57a-4304-8ed6-8cbb396392bc tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.951s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2233.412828] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.413208] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.413343] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2233.413528] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2233.413706] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2233.417314] env[62525]: INFO nova.compute.manager [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Terminating instance [ 2233.418983] env[62525]: DEBUG nova.compute.manager [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Start destroying the instance on the hypervisor. {{(pid=62525) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2233.419191] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Destroying instance {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2233.420015] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a7bba5-df73-44dc-bd98-3c24a776c66f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.427947] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powering off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2233.428175] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e3c9444-324c-4970-a029-dc3030598269 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.433556] env[62525]: DEBUG oslo_vmware.api [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2233.433556] env[62525]: value = "task-1782449" [ 2233.433556] env[62525]: _type = "Task" [ 2233.433556] env[62525]: } to complete. 
{{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.443051] env[62525]: DEBUG oslo_vmware.api [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782449, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.944282] env[62525]: DEBUG oslo_vmware.api [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782449, 'name': PowerOffVM_Task, 'duration_secs': 0.16521} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2233.944553] env[62525]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Powered off the VM {{(pid=62525) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2233.944719] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Unregistering the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2233.944961] env[62525]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60aae9ff-306a-4379-a660-b06c12cb42ab {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.011480] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Unregistered the VM {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2234.011703] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Deleting contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2234.011869] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleting the datastore file [datastore1] f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2234.012154] env[62525]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-623c45ed-4e17-44bb-a849-9adda910d844 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.018476] env[62525]: DEBUG oslo_vmware.api [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for the task: (returnval){ [ 2234.018476] env[62525]: value = "task-1782451" [ 2234.018476] 
env[62525]: _type = "Task" [ 2234.018476] env[62525]: } to complete. {{(pid=62525) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2234.026036] env[62525]: DEBUG oslo_vmware.api [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.528436] env[62525]: DEBUG oslo_vmware.api [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782451, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.029152] env[62525]: DEBUG oslo_vmware.api [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Task: {'id': task-1782451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.574151} completed successfully. {{(pid=62525) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2235.029444] env[62525]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted the datastore file {{(pid=62525) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2235.029631] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Deleted contents of the VM from datastore datastore1 {{(pid=62525) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2235.029802] env[62525]: DEBUG nova.virt.vmwareapi.vmops [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Instance destroyed {{(pid=62525) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2235.029974] env[62525]: INFO nova.compute.manager [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Took 1.61 seconds to destroy the instance on the hypervisor. [ 2235.030233] env[62525]: DEBUG oslo.service.loopingcall [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62525) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2235.030424] env[62525]: DEBUG nova.compute.manager [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Deallocating network for instance {{(pid=62525) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2235.030515] env[62525]: DEBUG nova.network.neutron [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] deallocate_for_instance() {{(pid=62525) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2235.617762] env[62525]: DEBUG nova.compute.manager [req-6b2be17e-2493-4996-9571-c64e66efe6c8 req-e1963fb1-b0cf-48fb-99ea-8d0351635489 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Received event network-vif-deleted-f1baff8b-9244-418b-bb8a-12ffcfcb7f3a {{(pid=62525) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2235.617991] env[62525]: INFO nova.compute.manager [req-6b2be17e-2493-4996-9571-c64e66efe6c8 req-e1963fb1-b0cf-48fb-99ea-8d0351635489 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Neutron deleted interface f1baff8b-9244-418b-bb8a-12ffcfcb7f3a; detaching it from the instance and deleting it from the info cache [ 2235.618165] env[62525]: DEBUG nova.network.neutron [req-6b2be17e-2493-4996-9571-c64e66efe6c8 req-e1963fb1-b0cf-48fb-99ea-8d0351635489 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.098444] env[62525]: DEBUG nova.network.neutron [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Updating instance_info_cache with network_info: [] {{(pid=62525) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.121331] env[62525]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b7c5307-496c-4d0b-b5ef-55fb91fa6dcf {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.130942] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1772d3-7221-49cd-be44-a030968e5483 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.154272] env[62525]: DEBUG nova.compute.manager [req-6b2be17e-2493-4996-9571-c64e66efe6c8 req-e1963fb1-b0cf-48fb-99ea-8d0351635489 service nova] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Detach interface failed, port_id=f1baff8b-9244-418b-bb8a-12ffcfcb7f3a, reason: Instance f40041a2-2cce-41c7-a9b3-44faaac49a0f could not be found. {{(pid=62525) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2236.601430] env[62525]: INFO nova.compute.manager [-] [instance: f40041a2-2cce-41c7-a9b3-44faaac49a0f] Took 1.57 seconds to deallocate network for instance. 
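The Acquiring / acquired / "released" triples around the "compute_resources" lock below are emitted by oslo.concurrency's lockutils, which records how long a caller waited for and then held a named semaphore. A minimal example of the same pattern (the lock name matches the log, but the function body and tracker methods are illustrative, not Nova's resource tracker):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        # Runs with the "compute_resources" lock held, so the periodic
        # update_available_resource task and per-instance operations such as
        # terminate_instance cannot race on the tracker's bookkeeping.
        tracker.remove_instance_claim(instance)     # hypothetical method
        tracker.report_inventory_to_placement()     # hypothetical method

Entering and leaving the decorated function is what produces the waited/held timings seen in the surrounding records.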
[ 2237.107962] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.108362] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.108481] env[62525]: DEBUG nova.objects.instance [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lazy-loading 'resources' on Instance uuid f40041a2-2cce-41c7-a9b3-44faaac49a0f {{(pid=62525) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2237.641771] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1d8d94-34d7-4497-97f8-4c5013586f90 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.649727] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be990f0-c2a2-4d44-983c-8f2dca97a8fd {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.680147] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0deb0adf-12a5-4592-8576-6ee2d3dc5049 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.687221] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97108e9-2c47-4dd8-b3e9-240318189150 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.700176] env[62525]: DEBUG nova.compute.provider_tree [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2238.203343] env[62525]: DEBUG nova.scheduler.client.report [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2238.708680] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a 
tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2238.728436] env[62525]: INFO nova.scheduler.client.report [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Deleted allocations for instance f40041a2-2cce-41c7-a9b3-44faaac49a0f [ 2239.236096] env[62525]: DEBUG oslo_concurrency.lockutils [None req-cfb6dede-b744-4bbb-972a-0f721b387c2a tempest-ServerActionsTestOtherB-1532649929 tempest-ServerActionsTestOtherB-1532649929-project-member] Lock "f40041a2-2cce-41c7-a9b3-44faaac49a0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.823s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.270022] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.270431] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.270431] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Starting heal instance info cache {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2251.270600] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Rebuilding the list of instances to heal {{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2251.774247] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Didn't find any instances for network info cache update. 
{{(pid=62525) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2251.774482] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.774685] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.774833] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.775041] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.775212] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.775362] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.775490] env[62525]: DEBUG nova.compute.manager [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] CONF.reclaim_instance_interval <= 0, skipping... 
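The run of "Running periodic task ComputeManager._*" records comes from oslo.service's periodic task machinery: the compute manager subclasses PeriodicTasks, each _poll_*/_heal_* method is registered with a decorator, and run_periodic_tasks() (the periodic_task.py:210 frame in the log) iterates over them; tasks whose feature is disabled, such as _reclaim_queued_deletes with reclaim_instance_interval <= 0, simply return early. A rough, simplified sketch of how such tasks are declared (a toy manager, not Nova's ComputeManager):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])


    class MiniManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _check_instance_build_time(self, context):
            pass  # a real task would time out stuck builds here

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            # Mirrors the log: skip when the feature is disabled by config.
            if CONF.reclaim_instance_interval <= 0:
                return

    # MiniManager().run_periodic_tasks(context=None) drives the tasks; the
    # "Running periodic task ..." lines come from oslo.service's own logging.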
{{(pid=62525) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2251.775628] env[62525]: DEBUG oslo_service.periodic_task [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Running periodic task ComputeManager.update_available_resource {{(pid=62525) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.279129] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2252.279529] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2252.279529] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.279693] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62525) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2252.280599] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e870f7ce-7ebc-49b6-9fd8-be7f3555b232 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.288949] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f58cf3e-7371-4740-9147-87245d380587 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.302452] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439818f4-d2dc-46b8-8f0a-52c5981ec10f {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.308375] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3561c3-d358-400a-ba06-f9c583a7a827 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.336199] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181235MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=62525) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2252.336332] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2252.336513] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.356365] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2253.356620] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62525) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2253.370272] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f66b65b-7004-44d8-b24a-661257984293 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.377720] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf1fdfc-fd6d-4980-b4a2-36b049bf0968 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.406412] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9da221-cab3-48a7-8897-76235b60f350 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.412916] env[62525]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4292cf4a-77b9-47bd-9d35-3feab0d5dd15 {{(pid=62525) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.425379] env[62525]: DEBUG nova.compute.provider_tree [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed in ProviderTree for provider: bb89c0ac-8f56-43c6-9f73-fd897be63424 {{(pid=62525) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2253.927989] env[62525]: DEBUG nova.scheduler.client.report [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Inventory has not changed for provider bb89c0ac-8f56-43c6-9f73-fd897be63424 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62525) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2254.432870] env[62525]: DEBUG nova.compute.resource_tracker [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62525) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2254.433268] env[62525]: DEBUG oslo_concurrency.lockutils [None req-82d63983-f77d-48d8-b64b-0874bf58e3cc None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=62525) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}